diff --git a/.clang-format-ignore b/.clang-format-ignore index 455c341089f0..de16d750b2c1 100644 --- a/.clang-format-ignore +++ b/.clang-format-ignore @@ -15,4 +15,5 @@ # mini_webgpu.h is copied from upstream with some local mods ./src/runtime/mini_webgpu.h ./dependencies/spirv +./dependencies/vulkan *.fbs.h diff --git a/CMakeLists.txt b/CMakeLists.txt index c92d27190ae1..ffb68a0a2dc2 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -16,7 +16,7 @@ if (NOT DEFINED VCPKG_MANIFEST_FEATURES) endif () project(Halide - VERSION 19.0.0 + VERSION 20.0.0 DESCRIPTION "Halide compiler and libraries" HOMEPAGE_URL "https://halide-lang.org") @@ -242,6 +242,7 @@ Halide_feature(WITH_TESTS "Halide's unit test suite" TOP_LEVEL) Halide_feature(WITH_TUTORIALS "Halide's tutorial code" TOP_LEVEL) Halide_feature(WITH_UTILS "Optional utility programs for Halide, including HalideTraceViz" TOP_LEVEL) + ## # Add source directories diff --git a/LICENSE.txt b/LICENSE.txt index 9faf448676b1..7bd7533d97da 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -198,7 +198,7 @@ IN THE MATERIALS. ---- -src/mini_vulkan.h is Copyright (c) 2014-2017 The Khronos Group Inc. +dependencies/vulkan is Copyright (c) 2014-2017 The Khronos Group Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/Makefile b/Makefile index 35446c048a2e..17615a6e807a 100644 --- a/Makefile +++ b/Makefile @@ -170,7 +170,7 @@ HEXAGON_LLVM_CONFIG_LIB=$(if $(WITH_HEXAGON), hexagon, ) SPIRV_CXX_FLAGS=$(if $(WITH_SPIRV), -DWITH_SPIRV -isystem $(ROOT_DIR)/dependencies/spirv/include, ) SPIRV_LLVM_CONFIG_LIB=$(if $(WITH_SPIRV), , ) -VULKAN_CXX_FLAGS=$(if $(WITH_VULKAN), -DWITH_VULKAN, ) +VULKAN_CXX_FLAGS=$(if $(WITH_VULKAN), -DWITH_VULKAN -isystem $(ROOT_DIR)/dependencies/vulkan/include, ) VULKAN_LLVM_CONFIG_LIB=$(if $(WITH_VULKAN), , ) WEBASSEMBLY_CXX_FLAGS=$(if $(WITH_WEBASSEMBLY), -DWITH_WEBASSEMBLY, ) @@ -1097,7 +1097,8 @@ RUNTIME_CXX_FLAGS = \ -Wno-unused-function \ -Wvla \ -Wsign-compare \ - -Wno-sync-alignment + -Wno-sync-alignment \ + -isystem $(ROOT_DIR)/dependencies/vulkan/include $(BUILD_DIR)/initmod.windows_%_x86_32.ll: $(SRC_DIR)/runtime/windows_%_x86.cpp $(BUILD_DIR)/clang_ok @mkdir -p $(@D) diff --git a/dependencies/README.md b/dependencies/README.md index d59dfd2dba79..ebc617569384 100644 --- a/dependencies/README.md +++ b/dependencies/README.md @@ -18,3 +18,19 @@ build it, and extract the necessary files. It takes a single argument, the name of the branch to clone. [SPIR-V]: https://www.khronos.org/registry/spir-v + +## Vulkan + +This folder contains a copy of the officially released v1.3.296 SDK headers +for [Vulkan], obtained from the `vulkan-sdk-1.3.296` branch +of https://github.com/KhronosGroup/Vulkan-Headers. + +The directory structure within this folder matches that of the official +version's install tree, plus the upstream `LICENSE.md` notice, minus files +that Halide doesn't need. + +The `update-vulkan.sh` script will automatically acquire the upstream repo, +build it, and extract the necessary files. It takes a single argument, the +name of the branch to clone.
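+
+For example, a typical invocation looks like the following (the branch name
+shown is taken from the usage hint inside `update-vulkan.sh`; substitute a
+newer branch when moving to a newer SDK):
+
+```sh
+# The script changes into its own directory first, so it can be run from
+# anywhere in the tree; it clones Vulkan-Headers, installs the headers into a
+# scratch prefix, copies only the files Halide needs into vulkan/, and stages
+# the result with `git add -f`.
+./dependencies/update-vulkan.sh vulkan-sdk-1.3.296
+```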
+ +[Vulkan]: https://www.vulkan.org/ diff --git a/dependencies/update-vulkan.sh b/dependencies/update-vulkan.sh new file mode 100755 index 000000000000..3566f046bc77 --- /dev/null +++ b/dependencies/update-vulkan.sh @@ -0,0 +1,41 @@ +#!/bin/bash + +set -eo pipefail + +cd -- "$(dirname -- "$0")" || exit 1 + +GIT_BRANCH="$1" +if [ -z "$GIT_BRANCH" ]; then + echo "error: usage: $0 <branch-name>" + echo "remark: the current branch is vulkan-sdk-1.3.296" + exit 1 +fi + +mkdir -p vulkan + +cleanup () { + rm -rf Vulkan-Headers +} + +trap cleanup SIGINT SIGTERM EXIT + +git clone https://github.com/KhronosGroup/Vulkan-Headers.git --branch "$GIT_BRANCH" +cmake -S Vulkan-Headers -B Vulkan-Headers/build -DCMAKE_BUILD_TYPE=Release "-DCMAKE_INSTALL_PREFIX=$PWD/Vulkan-Headers/_local" +cmake --build Vulkan-Headers/build --target install + +# Copy license information +cp Vulkan-Headers/LICENSE.md vulkan/ + +# Copy headers we care about (only the ANSI-C core interfaces!) +mkdir -p vulkan/include/vulkan +cp Vulkan-Headers/_local/include/vulkan/*.h vulkan/include/vulkan/ +mkdir -p vulkan/include/vk_video +cp Vulkan-Headers/_local/include/vk_video/*.h vulkan/include/vk_video/ + +# Copy CMake config +mkdir -p vulkan/share/ +cp -R Vulkan-Headers/_local/share/cmake vulkan/share/ + +git add -f vulkan/ + +echo "Updated Vulkan-Headers to branch $GIT_BRANCH!" diff --git a/dependencies/vulkan/LICENSE.md b/dependencies/vulkan/LICENSE.md new file mode 100644 index 000000000000..d6a0648b652a --- /dev/null +++ b/dependencies/vulkan/LICENSE.md @@ -0,0 +1,18 @@ +Copyright 2015-2023 The Khronos Group Inc. + +Files in this repository fall under one of these licenses: + +- `Apache-2.0` +- `MIT` + +Note: With the exception of `parse_dependency.py` the files using `MIT` license +also fall under `Apache-2.0`. Example: + +``` +SPDX-License-Identifier: Apache-2.0 OR MIT +``` + +Full license text of these licenses is available at: + + * Apache-2.0: https://opensource.org/licenses/Apache-2.0 + * MIT: https://opensource.org/licenses/MIT diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std.h new file mode 100644 index 000000000000..8ce283e8a8a4 --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std.h @@ -0,0 +1,392 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_av1std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_AV1_NUM_REF_FRAMES 8 +#define STD_VIDEO_AV1_REFS_PER_FRAME 7 +#define STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME 8 +#define STD_VIDEO_AV1_MAX_TILE_COLS 64 +#define STD_VIDEO_AV1_MAX_TILE_ROWS 64 +#define STD_VIDEO_AV1_MAX_SEGMENTS 8 +#define STD_VIDEO_AV1_SEG_LVL_MAX 8 +#define STD_VIDEO_AV1_PRIMARY_REF_NONE 7 +#define STD_VIDEO_AV1_SELECT_INTEGER_MV 2 +#define STD_VIDEO_AV1_SELECT_SCREEN_CONTENT_TOOLS 2 +#define STD_VIDEO_AV1_SKIP_MODE_FRAMES 2 +#define STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS 4 +#define STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS 2 +#define STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS 8 +#define STD_VIDEO_AV1_MAX_NUM_PLANES 3 +#define STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS 6 +#define STD_VIDEO_AV1_MAX_NUM_Y_POINTS 14 +#define STD_VIDEO_AV1_MAX_NUM_CB_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_CR_POINTS 10 +#define STD_VIDEO_AV1_MAX_NUM_POS_LUMA 24 +#define STD_VIDEO_AV1_MAX_NUM_POS_CHROMA 25 + +typedef enum StdVideoAV1Profile { + STD_VIDEO_AV1_PROFILE_MAIN = 0, + STD_VIDEO_AV1_PROFILE_HIGH = 1, + STD_VIDEO_AV1_PROFILE_PROFESSIONAL = 2, + STD_VIDEO_AV1_PROFILE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_PROFILE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Profile; + +typedef enum StdVideoAV1Level { + STD_VIDEO_AV1_LEVEL_2_0 = 0, + STD_VIDEO_AV1_LEVEL_2_1 = 1, + STD_VIDEO_AV1_LEVEL_2_2 = 2, + STD_VIDEO_AV1_LEVEL_2_3 = 3, + STD_VIDEO_AV1_LEVEL_3_0 = 4, + STD_VIDEO_AV1_LEVEL_3_1 = 5, + STD_VIDEO_AV1_LEVEL_3_2 = 6, + STD_VIDEO_AV1_LEVEL_3_3 = 7, + STD_VIDEO_AV1_LEVEL_4_0 = 8, + STD_VIDEO_AV1_LEVEL_4_1 = 9, + STD_VIDEO_AV1_LEVEL_4_2 = 10, + STD_VIDEO_AV1_LEVEL_4_3 = 11, + STD_VIDEO_AV1_LEVEL_5_0 = 12, + STD_VIDEO_AV1_LEVEL_5_1 = 13, + STD_VIDEO_AV1_LEVEL_5_2 = 14, + STD_VIDEO_AV1_LEVEL_5_3 = 15, + STD_VIDEO_AV1_LEVEL_6_0 = 16, + STD_VIDEO_AV1_LEVEL_6_1 = 17, + STD_VIDEO_AV1_LEVEL_6_2 = 18, + STD_VIDEO_AV1_LEVEL_6_3 = 19, + STD_VIDEO_AV1_LEVEL_7_0 = 20, + STD_VIDEO_AV1_LEVEL_7_1 = 21, + STD_VIDEO_AV1_LEVEL_7_2 = 22, + STD_VIDEO_AV1_LEVEL_7_3 = 23, + STD_VIDEO_AV1_LEVEL_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_LEVEL_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1Level; + +typedef enum StdVideoAV1FrameType { + STD_VIDEO_AV1_FRAME_TYPE_KEY = 0, + STD_VIDEO_AV1_FRAME_TYPE_INTER = 1, + STD_VIDEO_AV1_FRAME_TYPE_INTRA_ONLY = 2, + STD_VIDEO_AV1_FRAME_TYPE_SWITCH = 3, + STD_VIDEO_AV1_FRAME_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameType; + +typedef enum StdVideoAV1ReferenceName { + STD_VIDEO_AV1_REFERENCE_NAME_INTRA_FRAME = 0, + STD_VIDEO_AV1_REFERENCE_NAME_LAST_FRAME = 1, + STD_VIDEO_AV1_REFERENCE_NAME_LAST2_FRAME = 2, + STD_VIDEO_AV1_REFERENCE_NAME_LAST3_FRAME = 3, + STD_VIDEO_AV1_REFERENCE_NAME_GOLDEN_FRAME = 4, + STD_VIDEO_AV1_REFERENCE_NAME_BWDREF_FRAME = 5, + STD_VIDEO_AV1_REFERENCE_NAME_ALTREF2_FRAME = 6, + STD_VIDEO_AV1_REFERENCE_NAME_ALTREF_FRAME = 7, + STD_VIDEO_AV1_REFERENCE_NAME_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_REFERENCE_NAME_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ReferenceName; + +typedef enum StdVideoAV1InterpolationFilter { + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP = 0, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SMOOTH = 1, + STD_VIDEO_AV1_INTERPOLATION_FILTER_EIGHTTAP_SHARP = 2, + STD_VIDEO_AV1_INTERPOLATION_FILTER_BILINEAR = 3, + STD_VIDEO_AV1_INTERPOLATION_FILTER_SWITCHABLE = 4, + STD_VIDEO_AV1_INTERPOLATION_FILTER_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_INTERPOLATION_FILTER_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1InterpolationFilter; + +typedef enum StdVideoAV1TxMode { + 
STD_VIDEO_AV1_TX_MODE_ONLY_4X4 = 0, + STD_VIDEO_AV1_TX_MODE_LARGEST = 1, + STD_VIDEO_AV1_TX_MODE_SELECT = 2, + STD_VIDEO_AV1_TX_MODE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TX_MODE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TxMode; + +typedef enum StdVideoAV1FrameRestorationType { + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_NONE = 0, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_WIENER = 1, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SGRPROJ = 2, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_SWITCHABLE = 3, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_FRAME_RESTORATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1FrameRestorationType; + +typedef enum StdVideoAV1ColorPrimaries { + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_709 = 1, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_UNSPECIFIED = 2, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_M = 4, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_470_B_G = 5, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_601 = 6, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_240 = 7, + STD_VIDEO_AV1_COLOR_PRIMARIES_GENERIC_FILM = 8, + STD_VIDEO_AV1_COLOR_PRIMARIES_BT_2020 = 9, + STD_VIDEO_AV1_COLOR_PRIMARIES_XYZ = 10, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_431 = 11, + STD_VIDEO_AV1_COLOR_PRIMARIES_SMPTE_432 = 12, + STD_VIDEO_AV1_COLOR_PRIMARIES_EBU_3213 = 22, + STD_VIDEO_AV1_COLOR_PRIMARIES_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_COLOR_PRIMARIES_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ColorPrimaries; + +typedef enum StdVideoAV1TransferCharacteristics { + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_0 = 0, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_709 = 1, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_UNSPECIFIED = 2, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_RESERVED_3 = 3, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_M = 4, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_470_B_G = 5, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_601 = 6, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_240 = 7, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LINEAR = 8, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100 = 9, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_LOG_100_SQRT10 = 10, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_IEC_61966 = 11, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_1361 = 12, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SRGB = 13, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_10_BIT = 14, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_BT_2020_12_BIT = 15, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_2084 = 16, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_SMPTE_428 = 17, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_HLG = 18, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_TRANSFER_CHARACTERISTICS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1TransferCharacteristics; + +typedef enum StdVideoAV1MatrixCoefficients { + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_IDENTITY = 0, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_709 = 1, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_UNSPECIFIED = 2, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_RESERVED_3 = 3, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_FCC = 4, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_470_B_G = 5, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_601 = 6, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_240 = 7, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_YCGCO = 8, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_NCL = 9, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_BT_2020_CL = 10, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_SMPTE_2085 = 11, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_NCL = 12, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_CHROMAT_CL = 13, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_ICTCP = 14, + STD_VIDEO_AV1_MATRIX_COEFFICIENTS_INVALID = 0x7FFFFFFF, + 
STD_VIDEO_AV1_MATRIX_COEFFICIENTS_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1MatrixCoefficients; + +typedef enum StdVideoAV1ChromaSamplePosition { + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_UNKNOWN = 0, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_VERTICAL = 1, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_COLOCATED = 2, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_RESERVED = 3, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_INVALID = 0x7FFFFFFF, + STD_VIDEO_AV1_CHROMA_SAMPLE_POSITION_MAX_ENUM = 0x7FFFFFFF +} StdVideoAV1ChromaSamplePosition; +typedef struct StdVideoAV1ColorConfigFlags { + uint32_t mono_chrome : 1; + uint32_t color_range : 1; + uint32_t separate_uv_delta_q : 1; + uint32_t color_description_present_flag : 1; + uint32_t reserved : 28; +} StdVideoAV1ColorConfigFlags; + +typedef struct StdVideoAV1ColorConfig { + StdVideoAV1ColorConfigFlags flags; + uint8_t BitDepth; + uint8_t subsampling_x; + uint8_t subsampling_y; + uint8_t reserved1; + StdVideoAV1ColorPrimaries color_primaries; + StdVideoAV1TransferCharacteristics transfer_characteristics; + StdVideoAV1MatrixCoefficients matrix_coefficients; + StdVideoAV1ChromaSamplePosition chroma_sample_position; +} StdVideoAV1ColorConfig; + +typedef struct StdVideoAV1TimingInfoFlags { + uint32_t equal_picture_interval : 1; + uint32_t reserved : 31; +} StdVideoAV1TimingInfoFlags; + +typedef struct StdVideoAV1TimingInfo { + StdVideoAV1TimingInfoFlags flags; + uint32_t num_units_in_display_tick; + uint32_t time_scale; + uint32_t num_ticks_per_picture_minus_1; +} StdVideoAV1TimingInfo; + +typedef struct StdVideoAV1LoopFilterFlags { + uint32_t loop_filter_delta_enabled : 1; + uint32_t loop_filter_delta_update : 1; + uint32_t reserved : 30; +} StdVideoAV1LoopFilterFlags; + +typedef struct StdVideoAV1LoopFilter { + StdVideoAV1LoopFilterFlags flags; + uint8_t loop_filter_level[STD_VIDEO_AV1_MAX_LOOP_FILTER_STRENGTHS]; + uint8_t loop_filter_sharpness; + uint8_t update_ref_delta; + int8_t loop_filter_ref_deltas[STD_VIDEO_AV1_TOTAL_REFS_PER_FRAME]; + uint8_t update_mode_delta; + int8_t loop_filter_mode_deltas[STD_VIDEO_AV1_LOOP_FILTER_ADJUSTMENTS]; +} StdVideoAV1LoopFilter; + +typedef struct StdVideoAV1QuantizationFlags { + uint32_t using_qmatrix : 1; + uint32_t diff_uv_delta : 1; + uint32_t reserved : 30; +} StdVideoAV1QuantizationFlags; + +typedef struct StdVideoAV1Quantization { + StdVideoAV1QuantizationFlags flags; + uint8_t base_q_idx; + int8_t DeltaQYDc; + int8_t DeltaQUDc; + int8_t DeltaQUAc; + int8_t DeltaQVDc; + int8_t DeltaQVAc; + uint8_t qm_y; + uint8_t qm_u; + uint8_t qm_v; +} StdVideoAV1Quantization; + +typedef struct StdVideoAV1Segmentation { + uint8_t FeatureEnabled[STD_VIDEO_AV1_MAX_SEGMENTS]; + int16_t FeatureData[STD_VIDEO_AV1_MAX_SEGMENTS][STD_VIDEO_AV1_SEG_LVL_MAX]; +} StdVideoAV1Segmentation; + +typedef struct StdVideoAV1TileInfoFlags { + uint32_t uniform_tile_spacing_flag : 1; + uint32_t reserved : 31; +} StdVideoAV1TileInfoFlags; + +typedef struct StdVideoAV1TileInfo { + StdVideoAV1TileInfoFlags flags; + uint8_t TileCols; + uint8_t TileRows; + uint16_t context_update_tile_id; + uint8_t tile_size_bytes_minus_1; + uint8_t reserved1[7]; + const uint16_t* pMiColStarts; + const uint16_t* pMiRowStarts; + const uint16_t* pWidthInSbsMinus1; + const uint16_t* pHeightInSbsMinus1; +} StdVideoAV1TileInfo; + +typedef struct StdVideoAV1CDEF { + uint8_t cdef_damping_minus_3; + uint8_t cdef_bits; + uint8_t cdef_y_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_y_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t 
cdef_uv_pri_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; + uint8_t cdef_uv_sec_strength[STD_VIDEO_AV1_MAX_CDEF_FILTER_STRENGTHS]; +} StdVideoAV1CDEF; + +typedef struct StdVideoAV1LoopRestoration { + StdVideoAV1FrameRestorationType FrameRestorationType[STD_VIDEO_AV1_MAX_NUM_PLANES]; + uint16_t LoopRestorationSize[STD_VIDEO_AV1_MAX_NUM_PLANES]; +} StdVideoAV1LoopRestoration; + +typedef struct StdVideoAV1GlobalMotion { + uint8_t GmType[STD_VIDEO_AV1_NUM_REF_FRAMES]; + int32_t gm_params[STD_VIDEO_AV1_NUM_REF_FRAMES][STD_VIDEO_AV1_GLOBAL_MOTION_PARAMS]; +} StdVideoAV1GlobalMotion; + +typedef struct StdVideoAV1FilmGrainFlags { + uint32_t chroma_scaling_from_luma : 1; + uint32_t overlap_flag : 1; + uint32_t clip_to_restricted_range : 1; + uint32_t update_grain : 1; + uint32_t reserved : 28; +} StdVideoAV1FilmGrainFlags; + +typedef struct StdVideoAV1FilmGrain { + StdVideoAV1FilmGrainFlags flags; + uint8_t grain_scaling_minus_8; + uint8_t ar_coeff_lag; + uint8_t ar_coeff_shift_minus_6; + uint8_t grain_scale_shift; + uint16_t grain_seed; + uint8_t film_grain_params_ref_idx; + uint8_t num_y_points; + uint8_t point_y_value[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t point_y_scaling[STD_VIDEO_AV1_MAX_NUM_Y_POINTS]; + uint8_t num_cb_points; + uint8_t point_cb_value[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t point_cb_scaling[STD_VIDEO_AV1_MAX_NUM_CB_POINTS]; + uint8_t num_cr_points; + uint8_t point_cr_value[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + uint8_t point_cr_scaling[STD_VIDEO_AV1_MAX_NUM_CR_POINTS]; + int8_t ar_coeffs_y_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_LUMA]; + int8_t ar_coeffs_cb_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + int8_t ar_coeffs_cr_plus_128[STD_VIDEO_AV1_MAX_NUM_POS_CHROMA]; + uint8_t cb_mult; + uint8_t cb_luma_mult; + uint16_t cb_offset; + uint8_t cr_mult; + uint8_t cr_luma_mult; + uint16_t cr_offset; +} StdVideoAV1FilmGrain; + +typedef struct StdVideoAV1SequenceHeaderFlags { + uint32_t still_picture : 1; + uint32_t reduced_still_picture_header : 1; + uint32_t use_128x128_superblock : 1; + uint32_t enable_filter_intra : 1; + uint32_t enable_intra_edge_filter : 1; + uint32_t enable_interintra_compound : 1; + uint32_t enable_masked_compound : 1; + uint32_t enable_warped_motion : 1; + uint32_t enable_dual_filter : 1; + uint32_t enable_order_hint : 1; + uint32_t enable_jnt_comp : 1; + uint32_t enable_ref_frame_mvs : 1; + uint32_t frame_id_numbers_present_flag : 1; + uint32_t enable_superres : 1; + uint32_t enable_cdef : 1; + uint32_t enable_restoration : 1; + uint32_t film_grain_params_present : 1; + uint32_t timing_info_present_flag : 1; + uint32_t initial_display_delay_present_flag : 1; + uint32_t reserved : 13; +} StdVideoAV1SequenceHeaderFlags; + +typedef struct StdVideoAV1SequenceHeader { + StdVideoAV1SequenceHeaderFlags flags; + StdVideoAV1Profile seq_profile; + uint8_t frame_width_bits_minus_1; + uint8_t frame_height_bits_minus_1; + uint16_t max_frame_width_minus_1; + uint16_t max_frame_height_minus_1; + uint8_t delta_frame_id_length_minus_2; + uint8_t additional_frame_id_length_minus_1; + uint8_t order_hint_bits_minus_1; + uint8_t seq_force_integer_mv; + uint8_t seq_force_screen_content_tools; + uint8_t reserved1[5]; + const StdVideoAV1ColorConfig* pColorConfig; + const StdVideoAV1TimingInfo* pTimingInfo; +} StdVideoAV1SequenceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std_decode.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std_decode.h new file mode 100644 index 
000000000000..6b8130cd9fcf --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_av1std_decode.h @@ -0,0 +1,109 @@ +#ifndef VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_AV1STD_DECODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_av1std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_av1std_decode 1 +#include "vulkan_video_codec_av1std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_AV1_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_av1_decode" +typedef struct StdVideoDecodeAV1PictureInfoFlags { + uint32_t error_resilient_mode : 1; + uint32_t disable_cdf_update : 1; + uint32_t use_superres : 1; + uint32_t render_and_frame_size_different : 1; + uint32_t allow_screen_content_tools : 1; + uint32_t is_filter_switchable : 1; + uint32_t force_integer_mv : 1; + uint32_t frame_size_override_flag : 1; + uint32_t buffer_removal_time_present_flag : 1; + uint32_t allow_intrabc : 1; + uint32_t frame_refs_short_signaling : 1; + uint32_t allow_high_precision_mv : 1; + uint32_t is_motion_mode_switchable : 1; + uint32_t use_ref_frame_mvs : 1; + uint32_t disable_frame_end_update_cdf : 1; + uint32_t allow_warped_motion : 1; + uint32_t reduced_tx_set : 1; + uint32_t reference_select : 1; + uint32_t skip_mode_present : 1; + uint32_t delta_q_present : 1; + uint32_t delta_lf_present : 1; + uint32_t delta_lf_multi : 1; + uint32_t segmentation_enabled : 1; + uint32_t segmentation_update_map : 1; + uint32_t segmentation_temporal_update : 1; + uint32_t segmentation_update_data : 1; + uint32_t UsesLr : 1; + uint32_t usesChromaLr : 1; + uint32_t apply_grain : 1; + uint32_t reserved : 3; +} StdVideoDecodeAV1PictureInfoFlags; + +typedef struct StdVideoDecodeAV1PictureInfo { + StdVideoDecodeAV1PictureInfoFlags flags; + StdVideoAV1FrameType frame_type; + uint32_t current_frame_id; + uint8_t OrderHint; + uint8_t primary_ref_frame; + uint8_t refresh_frame_flags; + uint8_t reserved1; + StdVideoAV1InterpolationFilter interpolation_filter; + StdVideoAV1TxMode TxMode; + uint8_t delta_q_res; + uint8_t delta_lf_res; + uint8_t SkipModeFrame[STD_VIDEO_AV1_SKIP_MODE_FRAMES]; + uint8_t coded_denom; + uint8_t reserved2[3]; + uint8_t OrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; + uint32_t expectedFrameId[STD_VIDEO_AV1_NUM_REF_FRAMES]; + const StdVideoAV1TileInfo* pTileInfo; + const StdVideoAV1Quantization* pQuantization; + const StdVideoAV1Segmentation* pSegmentation; + const StdVideoAV1LoopFilter* pLoopFilter; + const StdVideoAV1CDEF* pCDEF; + const StdVideoAV1LoopRestoration* pLoopRestoration; + const StdVideoAV1GlobalMotion* pGlobalMotion; + const StdVideoAV1FilmGrain* pFilmGrain; +} StdVideoDecodeAV1PictureInfo; + +typedef struct StdVideoDecodeAV1ReferenceInfoFlags { + uint32_t disable_frame_end_update_cdf : 1; + uint32_t segmentation_enabled : 1; + uint32_t reserved : 30; +} StdVideoDecodeAV1ReferenceInfoFlags; + +typedef struct StdVideoDecodeAV1ReferenceInfo { + StdVideoDecodeAV1ReferenceInfoFlags flags; + uint8_t frame_type; + uint8_t RefFrameSignBias; + uint8_t OrderHint; + uint8_t SavedOrderHints[STD_VIDEO_AV1_NUM_REF_FRAMES]; +} 
StdVideoDecodeAV1ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std.h new file mode 100644 index 000000000000..6d27af37b733 --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std.h @@ -0,0 +1,312 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_H_ +#define VULKAN_VIDEO_CODEC_H264STD_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h264std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H264_MAX_NUM_LIST_REF 32 +#define STD_VIDEO_H264_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H264_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH264ChromaFormatIdc { + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ChromaFormatIdc; + +typedef enum StdVideoH264ProfileIdc { + STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66, + STD_VIDEO_H264_PROFILE_IDC_MAIN = 77, + STD_VIDEO_H264_PROFILE_IDC_HIGH = 100, + STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244, + STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ProfileIdc; + +typedef enum StdVideoH264LevelIdc { + STD_VIDEO_H264_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H264_LEVEL_IDC_1_1 = 1, + STD_VIDEO_H264_LEVEL_IDC_1_2 = 2, + STD_VIDEO_H264_LEVEL_IDC_1_3 = 3, + STD_VIDEO_H264_LEVEL_IDC_2_0 = 4, + STD_VIDEO_H264_LEVEL_IDC_2_1 = 5, + STD_VIDEO_H264_LEVEL_IDC_2_2 = 6, + STD_VIDEO_H264_LEVEL_IDC_3_0 = 7, + STD_VIDEO_H264_LEVEL_IDC_3_1 = 8, + STD_VIDEO_H264_LEVEL_IDC_3_2 = 9, + STD_VIDEO_H264_LEVEL_IDC_4_0 = 10, + STD_VIDEO_H264_LEVEL_IDC_4_1 = 11, + STD_VIDEO_H264_LEVEL_IDC_4_2 = 12, + STD_VIDEO_H264_LEVEL_IDC_5_0 = 13, + STD_VIDEO_H264_LEVEL_IDC_5_1 = 14, + STD_VIDEO_H264_LEVEL_IDC_5_2 = 15, + STD_VIDEO_H264_LEVEL_IDC_6_0 = 16, + STD_VIDEO_H264_LEVEL_IDC_6_1 = 17, + STD_VIDEO_H264_LEVEL_IDC_6_2 = 18, + STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264LevelIdc; + +typedef enum StdVideoH264PocType { + STD_VIDEO_H264_POC_TYPE_0 = 0, + STD_VIDEO_H264_POC_TYPE_1 = 1, + STD_VIDEO_H264_POC_TYPE_2 = 2, + STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PocType; + +typedef enum StdVideoH264AspectRatioIdc { + STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3, + STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7, + 
STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264AspectRatioIdc; + +typedef enum StdVideoH264WeightedBipredIdc { + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264WeightedBipredIdc; + +typedef enum StdVideoH264ModificationOfPicNumsIdc { + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264ModificationOfPicNumsIdc; + +typedef enum StdVideoH264MemMgmtControlOp { + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264MemMgmtControlOp; + +typedef enum StdVideoH264CabacInitIdc { + STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0, + STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1, + STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2, + STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264CabacInitIdc; + +typedef enum StdVideoH264DisableDeblockingFilterIdc { + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264DisableDeblockingFilterIdc; + +typedef enum StdVideoH264SliceType { + STD_VIDEO_H264_SLICE_TYPE_P = 0, + STD_VIDEO_H264_SLICE_TYPE_B = 1, + STD_VIDEO_H264_SLICE_TYPE_I = 2, + STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264SliceType; + +typedef enum StdVideoH264PictureType { + STD_VIDEO_H264_PICTURE_TYPE_P = 0, + STD_VIDEO_H264_PICTURE_TYPE_B = 1, + STD_VIDEO_H264_PICTURE_TYPE_I = 2, + STD_VIDEO_H264_PICTURE_TYPE_IDR = 5, + STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264PictureType; + +typedef enum StdVideoH264NonVclNaluType { + STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2, + 
STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH264NonVclNaluType; +typedef struct StdVideoH264SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t color_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t timing_info_present_flag : 1; + uint32_t fixed_frame_rate_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; +} StdVideoH264SpsVuiFlags; + +typedef struct StdVideoH264HrdParameters { + uint8_t cpb_cnt_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t reserved1; + uint32_t bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint8_t cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE]; + uint32_t initial_cpb_removal_delay_length_minus1; + uint32_t cpb_removal_delay_length_minus1; + uint32_t dpb_output_delay_length_minus1; + uint32_t time_offset_length; +} StdVideoH264HrdParameters; + +typedef struct StdVideoH264SequenceParameterSetVui { + StdVideoH264SpsVuiFlags flags; + StdVideoH264AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coefficients; + uint32_t num_units_in_tick; + uint32_t time_scale; + uint8_t max_num_reorder_frames; + uint8_t max_dec_frame_buffering; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint32_t reserved1; + const StdVideoH264HrdParameters* pHrdParameters; +} StdVideoH264SequenceParameterSetVui; + +typedef struct StdVideoH264SpsFlags { + uint32_t constraint_set0_flag : 1; + uint32_t constraint_set1_flag : 1; + uint32_t constraint_set2_flag : 1; + uint32_t constraint_set3_flag : 1; + uint32_t constraint_set4_flag : 1; + uint32_t constraint_set5_flag : 1; + uint32_t direct_8x8_inference_flag : 1; + uint32_t mb_adaptive_frame_field_flag : 1; + uint32_t frame_mbs_only_flag : 1; + uint32_t delta_pic_order_always_zero_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t gaps_in_frame_num_value_allowed_flag : 1; + uint32_t qpprime_y_zero_transform_bypass_flag : 1; + uint32_t frame_cropping_flag : 1; + uint32_t seq_scaling_matrix_present_flag : 1; + uint32_t vui_parameters_present_flag : 1; +} StdVideoH264SpsFlags; + +typedef struct StdVideoH264ScalingLists { + uint16_t scaling_list_present_mask; + uint16_t use_default_scaling_matrix_mask; + uint8_t ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS]; +} StdVideoH264ScalingLists; + +typedef struct StdVideoH264SequenceParameterSet { + StdVideoH264SpsFlags flags; + StdVideoH264ProfileIdc profile_idc; + StdVideoH264LevelIdc level_idc; + StdVideoH264ChromaFormatIdc chroma_format_idc; + uint8_t seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t 
log2_max_frame_num_minus4; + StdVideoH264PocType pic_order_cnt_type; + int32_t offset_for_non_ref_pic; + int32_t offset_for_top_to_bottom_field; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t num_ref_frames_in_pic_order_cnt_cycle; + uint8_t max_num_ref_frames; + uint8_t reserved1; + uint32_t pic_width_in_mbs_minus1; + uint32_t pic_height_in_map_units_minus1; + uint32_t frame_crop_left_offset; + uint32_t frame_crop_right_offset; + uint32_t frame_crop_top_offset; + uint32_t frame_crop_bottom_offset; + uint32_t reserved2; + const int32_t* pOffsetForRefFrame; + const StdVideoH264ScalingLists* pScalingLists; + const StdVideoH264SequenceParameterSetVui* pSequenceParameterSetVui; +} StdVideoH264SequenceParameterSet; + +typedef struct StdVideoH264PpsFlags { + uint32_t transform_8x8_mode_flag : 1; + uint32_t redundant_pic_cnt_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t bottom_field_pic_order_in_frame_present_flag : 1; + uint32_t entropy_coding_mode_flag : 1; + uint32_t pic_scaling_matrix_present_flag : 1; +} StdVideoH264PpsFlags; + +typedef struct StdVideoH264PictureParameterSet { + StdVideoH264PpsFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + StdVideoH264WeightedBipredIdc weighted_bipred_idc; + int8_t pic_init_qp_minus26; + int8_t pic_init_qs_minus26; + int8_t chroma_qp_index_offset; + int8_t second_chroma_qp_index_offset; + const StdVideoH264ScalingLists* pScalingLists; +} StdVideoH264PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h new file mode 100644 index 000000000000..439cb885e71b --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_decode.h @@ -0,0 +1,77 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h264std_decode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode" +#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2 + +typedef enum StdVideoDecodeH264FieldOrderCount { + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF, + STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF +} StdVideoDecodeH264FieldOrderCount; +typedef struct StdVideoDecodeH264PictureInfoFlags { + uint32_t field_pic_flag : 1; + uint32_t is_intra : 1; + uint32_t IdrPicFlag : 1; + uint32_t bottom_field_flag : 1; + uint32_t is_reference : 1; + uint32_t complementary_field_pair : 1; +} StdVideoDecodeH264PictureInfoFlags; + +typedef struct StdVideoDecodeH264PictureInfo { + StdVideoDecodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint8_t reserved1; + uint8_t reserved2; + uint16_t frame_num; + uint16_t idr_pic_id; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264PictureInfo; + +typedef struct StdVideoDecodeH264ReferenceInfoFlags { + uint32_t top_field_flag : 1; + uint32_t bottom_field_flag : 1; + uint32_t used_for_long_term_reference : 1; + uint32_t is_non_existing : 1; +} StdVideoDecodeH264ReferenceInfoFlags; + +typedef struct StdVideoDecodeH264ReferenceInfo { + StdVideoDecodeH264ReferenceInfoFlags flags; + uint16_t FrameNum; + uint16_t reserved; + int32_t PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE]; +} StdVideoDecodeH264ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h new file mode 100644 index 000000000000..9e24aa5d991d --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h264std_encode.h @@ -0,0 +1,147 @@ +#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h264std_encode 1 +#include "vulkan_video_codec_h264std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode" +typedef struct StdVideoEncodeH264WeightTableFlags { + uint32_t luma_weight_l0_flag; + uint32_t chroma_weight_l0_flag; + uint32_t luma_weight_l1_flag; + uint32_t chroma_weight_l1_flag; +} StdVideoEncodeH264WeightTableFlags; + +typedef struct StdVideoEncodeH264WeightTable { + StdVideoEncodeH264WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + uint8_t chroma_log2_weight_denom; + int8_t luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + int8_t chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; + int8_t chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES]; +} StdVideoEncodeH264WeightTable; + +typedef struct StdVideoEncodeH264SliceHeaderFlags { + uint32_t direct_spatial_mv_pred_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264SliceHeaderFlags; + +typedef struct StdVideoEncodeH264PictureInfoFlags { + uint32_t IdrPicFlag : 1; + uint32_t is_reference : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t long_term_reference_flag : 1; + uint32_t adaptive_ref_pic_marking_mode_flag : 1; + uint32_t reserved : 27; +} StdVideoEncodeH264PictureInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t reserved : 31; +} StdVideoEncodeH264ReferenceInfoFlags; + +typedef struct StdVideoEncodeH264ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH264ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH264RefListModEntry { + StdVideoH264ModificationOfPicNumsIdc modification_of_pic_nums_idc; + uint16_t abs_diff_pic_num_minus1; + uint16_t long_term_pic_num; +} StdVideoEncodeH264RefListModEntry; + +typedef struct StdVideoEncodeH264RefPicMarkingEntry { + StdVideoH264MemMgmtControlOp memory_management_control_operation; + uint16_t difference_of_pic_nums_minus1; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint16_t max_long_term_frame_idx_plus1; +} StdVideoEncodeH264RefPicMarkingEntry; + +typedef struct StdVideoEncodeH264ReferenceListsInfo { + StdVideoEncodeH264ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF]; + uint8_t refList0ModOpCount; + uint8_t refList1ModOpCount; + uint8_t refPicMarkingOpCount; + uint8_t reserved1[7]; + const StdVideoEncodeH264RefListModEntry* pRefList0ModOperations; + const StdVideoEncodeH264RefListModEntry* pRefList1ModOperations; + const StdVideoEncodeH264RefPicMarkingEntry* pRefPicMarkingOperations; +} 
StdVideoEncodeH264ReferenceListsInfo; + +typedef struct StdVideoEncodeH264PictureInfo { + StdVideoEncodeH264PictureInfoFlags flags; + uint8_t seq_parameter_set_id; + uint8_t pic_parameter_set_id; + uint16_t idr_pic_id; + StdVideoH264PictureType primary_pic_type; + uint32_t frame_num; + int32_t PicOrderCnt; + uint8_t temporal_id; + uint8_t reserved1[3]; + const StdVideoEncodeH264ReferenceListsInfo* pRefLists; +} StdVideoEncodeH264PictureInfo; + +typedef struct StdVideoEncodeH264ReferenceInfo { + StdVideoEncodeH264ReferenceInfoFlags flags; + StdVideoH264PictureType primary_pic_type; + uint32_t FrameNum; + int32_t PicOrderCnt; + uint16_t long_term_pic_num; + uint16_t long_term_frame_idx; + uint8_t temporal_id; +} StdVideoEncodeH264ReferenceInfo; + +typedef struct StdVideoEncodeH264SliceHeader { + StdVideoEncodeH264SliceHeaderFlags flags; + uint32_t first_mb_in_slice; + StdVideoH264SliceType slice_type; + int8_t slice_alpha_c0_offset_div2; + int8_t slice_beta_offset_div2; + int8_t slice_qp_delta; + uint8_t reserved1; + StdVideoH264CabacInitIdc cabac_init_idc; + StdVideoH264DisableDeblockingFilterIdc disable_deblocking_filter_idc; + const StdVideoEncodeH264WeightTable* pWeightTable; +} StdVideoEncodeH264SliceHeader; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std.h new file mode 100644 index 000000000000..d0a1bacbea37 --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std.h @@ -0,0 +1,446 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_H_ +#define VULKAN_VIDEO_CODEC_H265STD_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h265std 1 +#include "vulkan_video_codecs_common.h" +#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE 32 +#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6 +#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2 +#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19 +#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3 +#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128 +#define STD_VIDEO_H265_MAX_NUM_LIST_REF 15 +#define STD_VIDEO_H265_MAX_CHROMA_PLANES 2 +#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64 +#define STD_VIDEO_H265_MAX_DPB_SIZE 16 +#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32 +#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16 +#define STD_VIDEO_H265_MAX_DELTA_POC 48 +#define STD_VIDEO_H265_NO_REFERENCE_PICTURE 0xFF + +typedef enum StdVideoH265ChromaFormatIdc { + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ChromaFormatIdc; + +typedef enum StdVideoH265ProfileIdc { + STD_VIDEO_H265_PROFILE_IDC_MAIN = 1, + STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2, + STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3, + STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4, + STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9, + STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265ProfileIdc; + +typedef enum StdVideoH265LevelIdc { + STD_VIDEO_H265_LEVEL_IDC_1_0 = 0, + STD_VIDEO_H265_LEVEL_IDC_2_0 = 1, + STD_VIDEO_H265_LEVEL_IDC_2_1 = 2, + STD_VIDEO_H265_LEVEL_IDC_3_0 = 3, + STD_VIDEO_H265_LEVEL_IDC_3_1 = 4, + STD_VIDEO_H265_LEVEL_IDC_4_0 = 5, + STD_VIDEO_H265_LEVEL_IDC_4_1 = 6, + STD_VIDEO_H265_LEVEL_IDC_5_0 = 7, + STD_VIDEO_H265_LEVEL_IDC_5_1 = 8, + STD_VIDEO_H265_LEVEL_IDC_5_2 = 9, + STD_VIDEO_H265_LEVEL_IDC_6_0 = 10, + STD_VIDEO_H265_LEVEL_IDC_6_1 = 11, + STD_VIDEO_H265_LEVEL_IDC_6_2 = 12, + STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265LevelIdc; + +typedef enum StdVideoH265SliceType { + STD_VIDEO_H265_SLICE_TYPE_B = 0, + STD_VIDEO_H265_SLICE_TYPE_P = 1, + STD_VIDEO_H265_SLICE_TYPE_I = 2, + STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265SliceType; + +typedef enum StdVideoH265PictureType { + STD_VIDEO_H265_PICTURE_TYPE_P = 0, + STD_VIDEO_H265_PICTURE_TYPE_B = 1, + STD_VIDEO_H265_PICTURE_TYPE_I = 2, + STD_VIDEO_H265_PICTURE_TYPE_IDR = 3, + STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265PictureType; + +typedef enum StdVideoH265AspectRatioIdc { + STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0, + STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1, + STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2, + STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3, + 
STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4, + STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5, + STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6, + STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7, + STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8, + STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9, + STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10, + STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11, + STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12, + STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13, + STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14, + STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15, + STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16, + STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255, + STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF, + STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF +} StdVideoH265AspectRatioIdc; +typedef struct StdVideoH265DecPicBufMgr { + uint32_t max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint8_t max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; +} StdVideoH265DecPicBufMgr; + +typedef struct StdVideoH265SubLayerHrdParameters { + uint32_t bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE]; + uint32_t cbr_flag; +} StdVideoH265SubLayerHrdParameters; + +typedef struct StdVideoH265HrdFlags { + uint32_t nal_hrd_parameters_present_flag : 1; + uint32_t vcl_hrd_parameters_present_flag : 1; + uint32_t sub_pic_hrd_params_present_flag : 1; + uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1; + uint32_t fixed_pic_rate_general_flag : 8; + uint32_t fixed_pic_rate_within_cvs_flag : 8; + uint32_t low_delay_hrd_flag : 8; +} StdVideoH265HrdFlags; + +typedef struct StdVideoH265HrdParameters { + StdVideoH265HrdFlags flags; + uint8_t tick_divisor_minus2; + uint8_t du_cpb_removal_delay_increment_length_minus1; + uint8_t dpb_output_delay_du_length_minus1; + uint8_t bit_rate_scale; + uint8_t cpb_size_scale; + uint8_t cpb_size_du_scale; + uint8_t initial_cpb_removal_delay_length_minus1; + uint8_t au_cpb_removal_delay_length_minus1; + uint8_t dpb_output_delay_length_minus1; + uint8_t cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE]; + uint16_t reserved[3]; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersNal; + const StdVideoH265SubLayerHrdParameters* pSubLayerHrdParametersVcl; +} StdVideoH265HrdParameters; + +typedef struct StdVideoH265VpsFlags { + uint32_t vps_temporal_id_nesting_flag : 1; + uint32_t vps_sub_layer_ordering_info_present_flag : 1; + uint32_t vps_timing_info_present_flag : 1; + uint32_t vps_poc_proportional_to_timing_flag : 1; +} StdVideoH265VpsFlags; + +typedef struct StdVideoH265ProfileTierLevelFlags { + uint32_t general_tier_flag : 1; + uint32_t general_progressive_source_flag : 1; + uint32_t general_interlaced_source_flag : 1; + uint32_t general_non_packed_constraint_flag : 1; + uint32_t general_frame_only_constraint_flag : 1; +} StdVideoH265ProfileTierLevelFlags; + +typedef struct StdVideoH265ProfileTierLevel { + StdVideoH265ProfileTierLevelFlags flags; + StdVideoH265ProfileIdc general_profile_idc; + StdVideoH265LevelIdc general_level_idc; +} StdVideoH265ProfileTierLevel; + +typedef struct StdVideoH265VideoParameterSet { + StdVideoH265VpsFlags flags; + uint8_t vps_video_parameter_set_id; + uint8_t 
vps_max_sub_layers_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint32_t vps_num_units_in_tick; + uint32_t vps_time_scale; + uint32_t vps_num_ticks_poc_diff_one_minus1; + uint32_t reserved3; + const StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265HrdParameters* pHrdParameters; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; +} StdVideoH265VideoParameterSet; + +typedef struct StdVideoH265ScalingLists { + uint8_t ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS]; + uint8_t ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS]; + uint8_t ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS]; + uint8_t ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS]; + uint8_t ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS]; + uint8_t ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS]; +} StdVideoH265ScalingLists; + +typedef struct StdVideoH265SpsVuiFlags { + uint32_t aspect_ratio_info_present_flag : 1; + uint32_t overscan_info_present_flag : 1; + uint32_t overscan_appropriate_flag : 1; + uint32_t video_signal_type_present_flag : 1; + uint32_t video_full_range_flag : 1; + uint32_t colour_description_present_flag : 1; + uint32_t chroma_loc_info_present_flag : 1; + uint32_t neutral_chroma_indication_flag : 1; + uint32_t field_seq_flag : 1; + uint32_t frame_field_info_present_flag : 1; + uint32_t default_display_window_flag : 1; + uint32_t vui_timing_info_present_flag : 1; + uint32_t vui_poc_proportional_to_timing_flag : 1; + uint32_t vui_hrd_parameters_present_flag : 1; + uint32_t bitstream_restriction_flag : 1; + uint32_t tiles_fixed_structure_flag : 1; + uint32_t motion_vectors_over_pic_boundaries_flag : 1; + uint32_t restricted_ref_pic_lists_flag : 1; +} StdVideoH265SpsVuiFlags; + +typedef struct StdVideoH265SequenceParameterSetVui { + StdVideoH265SpsVuiFlags flags; + StdVideoH265AspectRatioIdc aspect_ratio_idc; + uint16_t sar_width; + uint16_t sar_height; + uint8_t video_format; + uint8_t colour_primaries; + uint8_t transfer_characteristics; + uint8_t matrix_coeffs; + uint8_t chroma_sample_loc_type_top_field; + uint8_t chroma_sample_loc_type_bottom_field; + uint8_t reserved1; + uint8_t reserved2; + uint16_t def_disp_win_left_offset; + uint16_t def_disp_win_right_offset; + uint16_t def_disp_win_top_offset; + uint16_t def_disp_win_bottom_offset; + uint32_t vui_num_units_in_tick; + uint32_t vui_time_scale; + uint32_t vui_num_ticks_poc_diff_one_minus1; + uint16_t min_spatial_segmentation_idc; + uint16_t reserved3; + uint8_t max_bytes_per_pic_denom; + uint8_t max_bits_per_min_cu_denom; + uint8_t log2_max_mv_length_horizontal; + uint8_t log2_max_mv_length_vertical; + const StdVideoH265HrdParameters* pHrdParameters; +} StdVideoH265SequenceParameterSetVui; + +typedef struct StdVideoH265PredictorPaletteEntries { + uint16_t PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE]; +} StdVideoH265PredictorPaletteEntries; + +typedef struct StdVideoH265SpsFlags { + uint32_t sps_temporal_id_nesting_flag : 1; + uint32_t separate_colour_plane_flag : 1; + uint32_t conformance_window_flag : 1; + uint32_t sps_sub_layer_ordering_info_present_flag : 1; + uint32_t scaling_list_enabled_flag : 1; + uint32_t sps_scaling_list_data_present_flag : 1; + uint32_t 
amp_enabled_flag : 1; + uint32_t sample_adaptive_offset_enabled_flag : 1; + uint32_t pcm_enabled_flag : 1; + uint32_t pcm_loop_filter_disabled_flag : 1; + uint32_t long_term_ref_pics_present_flag : 1; + uint32_t sps_temporal_mvp_enabled_flag : 1; + uint32_t strong_intra_smoothing_enabled_flag : 1; + uint32_t vui_parameters_present_flag : 1; + uint32_t sps_extension_present_flag : 1; + uint32_t sps_range_extension_flag : 1; + uint32_t transform_skip_rotation_enabled_flag : 1; + uint32_t transform_skip_context_enabled_flag : 1; + uint32_t implicit_rdpcm_enabled_flag : 1; + uint32_t explicit_rdpcm_enabled_flag : 1; + uint32_t extended_precision_processing_flag : 1; + uint32_t intra_smoothing_disabled_flag : 1; + uint32_t high_precision_offsets_enabled_flag : 1; + uint32_t persistent_rice_adaptation_enabled_flag : 1; + uint32_t cabac_bypass_alignment_enabled_flag : 1; + uint32_t sps_scc_extension_flag : 1; + uint32_t sps_curr_pic_ref_enabled_flag : 1; + uint32_t palette_mode_enabled_flag : 1; + uint32_t sps_palette_predictor_initializers_present_flag : 1; + uint32_t intra_boundary_filtering_disabled_flag : 1; +} StdVideoH265SpsFlags; + +typedef struct StdVideoH265ShortTermRefPicSetFlags { + uint32_t inter_ref_pic_set_prediction_flag : 1; + uint32_t delta_rps_sign : 1; +} StdVideoH265ShortTermRefPicSetFlags; + +typedef struct StdVideoH265ShortTermRefPicSet { + StdVideoH265ShortTermRefPicSetFlags flags; + uint32_t delta_idx_minus1; + uint16_t use_delta_flag; + uint16_t abs_delta_rps_minus1; + uint16_t used_by_curr_pic_flag; + uint16_t used_by_curr_pic_s0_flag; + uint16_t used_by_curr_pic_s1_flag; + uint16_t reserved1; + uint8_t reserved2; + uint8_t reserved3; + uint8_t num_negative_pics; + uint8_t num_positive_pics; + uint16_t delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; + uint16_t delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE]; +} StdVideoH265ShortTermRefPicSet; + +typedef struct StdVideoH265LongTermRefPicsSps { + uint32_t used_by_curr_pic_lt_sps_flag; + uint32_t lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; +} StdVideoH265LongTermRefPicsSps; + +typedef struct StdVideoH265SequenceParameterSet { + StdVideoH265SpsFlags flags; + StdVideoH265ChromaFormatIdc chroma_format_idc; + uint32_t pic_width_in_luma_samples; + uint32_t pic_height_in_luma_samples; + uint8_t sps_video_parameter_set_id; + uint8_t sps_max_sub_layers_minus1; + uint8_t sps_seq_parameter_set_id; + uint8_t bit_depth_luma_minus8; + uint8_t bit_depth_chroma_minus8; + uint8_t log2_max_pic_order_cnt_lsb_minus4; + uint8_t log2_min_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_luma_coding_block_size; + uint8_t log2_min_luma_transform_block_size_minus2; + uint8_t log2_diff_max_min_luma_transform_block_size; + uint8_t max_transform_hierarchy_depth_inter; + uint8_t max_transform_hierarchy_depth_intra; + uint8_t num_short_term_ref_pic_sets; + uint8_t num_long_term_ref_pics_sps; + uint8_t pcm_sample_bit_depth_luma_minus1; + uint8_t pcm_sample_bit_depth_chroma_minus1; + uint8_t log2_min_pcm_luma_coding_block_size_minus3; + uint8_t log2_diff_max_min_pcm_luma_coding_block_size; + uint8_t reserved1; + uint8_t reserved2; + uint8_t palette_max_size; + uint8_t delta_palette_max_predictor_size; + uint8_t motion_vector_resolution_control_idc; + uint8_t sps_num_palette_predictor_initializers_minus1; + uint32_t conf_win_left_offset; + uint32_t conf_win_right_offset; + uint32_t conf_win_top_offset; + uint32_t conf_win_bottom_offset; + const StdVideoH265ProfileTierLevel* pProfileTierLevel; + const 
StdVideoH265DecPicBufMgr* pDecPicBufMgr; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoH265LongTermRefPicsSps* pLongTermRefPicsSps; + const StdVideoH265SequenceParameterSetVui* pSequenceParameterSetVui; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} StdVideoH265SequenceParameterSet; + +typedef struct StdVideoH265PpsFlags { + uint32_t dependent_slice_segments_enabled_flag : 1; + uint32_t output_flag_present_flag : 1; + uint32_t sign_data_hiding_enabled_flag : 1; + uint32_t cabac_init_present_flag : 1; + uint32_t constrained_intra_pred_flag : 1; + uint32_t transform_skip_enabled_flag : 1; + uint32_t cu_qp_delta_enabled_flag : 1; + uint32_t pps_slice_chroma_qp_offsets_present_flag : 1; + uint32_t weighted_pred_flag : 1; + uint32_t weighted_bipred_flag : 1; + uint32_t transquant_bypass_enabled_flag : 1; + uint32_t tiles_enabled_flag : 1; + uint32_t entropy_coding_sync_enabled_flag : 1; + uint32_t uniform_spacing_flag : 1; + uint32_t loop_filter_across_tiles_enabled_flag : 1; + uint32_t pps_loop_filter_across_slices_enabled_flag : 1; + uint32_t deblocking_filter_control_present_flag : 1; + uint32_t deblocking_filter_override_enabled_flag : 1; + uint32_t pps_deblocking_filter_disabled_flag : 1; + uint32_t pps_scaling_list_data_present_flag : 1; + uint32_t lists_modification_present_flag : 1; + uint32_t slice_segment_header_extension_present_flag : 1; + uint32_t pps_extension_present_flag : 1; + uint32_t cross_component_prediction_enabled_flag : 1; + uint32_t chroma_qp_offset_list_enabled_flag : 1; + uint32_t pps_curr_pic_ref_enabled_flag : 1; + uint32_t residual_adaptive_colour_transform_enabled_flag : 1; + uint32_t pps_slice_act_qp_offsets_present_flag : 1; + uint32_t pps_palette_predictor_initializers_present_flag : 1; + uint32_t monochrome_palette_flag : 1; + uint32_t pps_range_extension_flag : 1; +} StdVideoH265PpsFlags; + +typedef struct StdVideoH265PictureParameterSet { + StdVideoH265PpsFlags flags; + uint8_t pps_pic_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t sps_video_parameter_set_id; + uint8_t num_extra_slice_header_bits; + uint8_t num_ref_idx_l0_default_active_minus1; + uint8_t num_ref_idx_l1_default_active_minus1; + int8_t init_qp_minus26; + uint8_t diff_cu_qp_delta_depth; + int8_t pps_cb_qp_offset; + int8_t pps_cr_qp_offset; + int8_t pps_beta_offset_div2; + int8_t pps_tc_offset_div2; + uint8_t log2_parallel_merge_level_minus2; + uint8_t log2_max_transform_skip_block_size_minus2; + uint8_t diff_cu_chroma_qp_offset_depth; + uint8_t chroma_qp_offset_list_len_minus1; + int8_t cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + int8_t cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE]; + uint8_t log2_sao_offset_scale_luma; + uint8_t log2_sao_offset_scale_chroma; + int8_t pps_act_y_qp_offset_plus5; + int8_t pps_act_cb_qp_offset_plus5; + int8_t pps_act_cr_qp_offset_plus3; + uint8_t pps_num_palette_predictor_initializers; + uint8_t luma_bit_depth_entry_minus8; + uint8_t chroma_bit_depth_entry_minus8; + uint8_t num_tile_columns_minus1; + uint8_t num_tile_rows_minus1; + uint8_t reserved1; + uint8_t reserved2; + uint16_t column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE]; + uint16_t row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE]; + uint32_t reserved3; + const StdVideoH265ScalingLists* pScalingLists; + const StdVideoH265PredictorPaletteEntries* pPredictorPaletteEntries; +} 
StdVideoH265PictureParameterSet; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h new file mode 100644 index 000000000000..0178793e5111 --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_decode.h @@ -0,0 +1,67 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codec_h265std_decode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode" +#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8 +typedef struct StdVideoDecodeH265PictureInfoFlags { + uint32_t IrapPicFlag : 1; + uint32_t IdrPicFlag : 1; + uint32_t IsReference : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; +} StdVideoDecodeH265PictureInfoFlags; + +typedef struct StdVideoDecodeH265PictureInfo { + StdVideoDecodeH265PictureInfoFlags flags; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t NumDeltaPocsOfRefRpsIdx; + int32_t PicOrderCntVal; + uint16_t NumBitsForSTRefPicSetInSlice; + uint16_t reserved; + uint8_t RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; + uint8_t RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE]; +} StdVideoDecodeH265PictureInfo; + +typedef struct StdVideoDecodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; +} StdVideoDecodeH265ReferenceInfoFlags; + +typedef struct StdVideoDecodeH265ReferenceInfo { + StdVideoDecodeH265ReferenceInfoFlags flags; + int32_t PicOrderCntVal; +} StdVideoDecodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h new file mode 100644 index 000000000000..ee34491f4e9c --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codec_h265std_encode.h @@ -0,0 +1,157 @@ +#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ +#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls. 
+#define vulkan_video_codec_h265std_encode 1 +#include "vulkan_video_codec_h265std.h" + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0) + +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_1_0_0 +#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode" +typedef struct StdVideoEncodeH265WeightTableFlags { + uint16_t luma_weight_l0_flag; + uint16_t chroma_weight_l0_flag; + uint16_t luma_weight_l1_flag; + uint16_t chroma_weight_l1_flag; +} StdVideoEncodeH265WeightTableFlags; + +typedef struct StdVideoEncodeH265WeightTable { + StdVideoEncodeH265WeightTableFlags flags; + uint8_t luma_log2_weight_denom; + int8_t delta_chroma_log2_weight_denom; + int8_t delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + int8_t delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; + int8_t delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES]; +} StdVideoEncodeH265WeightTable; + +typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags { + uint32_t first_slice_segment_in_pic_flag : 1; + uint32_t dependent_slice_segment_flag : 1; + uint32_t slice_sao_luma_flag : 1; + uint32_t slice_sao_chroma_flag : 1; + uint32_t num_ref_idx_active_override_flag : 1; + uint32_t mvd_l1_zero_flag : 1; + uint32_t cabac_init_flag : 1; + uint32_t cu_chroma_qp_offset_enabled_flag : 1; + uint32_t deblocking_filter_override_flag : 1; + uint32_t slice_deblocking_filter_disabled_flag : 1; + uint32_t collocated_from_l0_flag : 1; + uint32_t slice_loop_filter_across_slices_enabled_flag : 1; + uint32_t reserved : 20; +} StdVideoEncodeH265SliceSegmentHeaderFlags; + +typedef struct StdVideoEncodeH265SliceSegmentHeader { + StdVideoEncodeH265SliceSegmentHeaderFlags flags; + StdVideoH265SliceType slice_type; + uint32_t slice_segment_address; + uint8_t collocated_ref_idx; + uint8_t MaxNumMergeCand; + int8_t slice_cb_qp_offset; + int8_t slice_cr_qp_offset; + int8_t slice_beta_offset_div2; + int8_t slice_tc_offset_div2; + int8_t slice_act_y_qp_offset; + int8_t slice_act_cb_qp_offset; + int8_t slice_act_cr_qp_offset; + int8_t slice_qp_delta; + uint16_t reserved1; + const StdVideoEncodeH265WeightTable* pWeightTable; +} StdVideoEncodeH265SliceSegmentHeader; + +typedef struct StdVideoEncodeH265ReferenceListsInfoFlags { + uint32_t ref_pic_list_modification_flag_l0 : 1; + uint32_t ref_pic_list_modification_flag_l1 : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceListsInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceListsInfo { + StdVideoEncodeH265ReferenceListsInfoFlags flags; + uint8_t num_ref_idx_l0_active_minus1; + uint8_t num_ref_idx_l1_active_minus1; + uint8_t RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF]; + uint8_t list_entry_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF]; +} StdVideoEncodeH265ReferenceListsInfo; + +typedef struct StdVideoEncodeH265PictureInfoFlags { + uint32_t is_reference : 1; + uint32_t IrapPicFlag : 1; + 
uint32_t used_for_long_term_reference : 1; + uint32_t discardable_flag : 1; + uint32_t cross_layer_bla_flag : 1; + uint32_t pic_output_flag : 1; + uint32_t no_output_of_prior_pics_flag : 1; + uint32_t short_term_ref_pic_set_sps_flag : 1; + uint32_t slice_temporal_mvp_enabled_flag : 1; + uint32_t reserved : 23; +} StdVideoEncodeH265PictureInfoFlags; + +typedef struct StdVideoEncodeH265LongTermRefPics { + uint8_t num_long_term_sps; + uint8_t num_long_term_pics; + uint8_t lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS]; + uint8_t poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS]; + uint16_t used_by_curr_pic_lt_flag; + uint8_t delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC]; + uint8_t delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC]; +} StdVideoEncodeH265LongTermRefPics; + +typedef struct StdVideoEncodeH265PictureInfo { + StdVideoEncodeH265PictureInfoFlags flags; + StdVideoH265PictureType pic_type; + uint8_t sps_video_parameter_set_id; + uint8_t pps_seq_parameter_set_id; + uint8_t pps_pic_parameter_set_id; + uint8_t short_term_ref_pic_set_idx; + int32_t PicOrderCntVal; + uint8_t TemporalId; + uint8_t reserved1[7]; + const StdVideoEncodeH265ReferenceListsInfo* pRefLists; + const StdVideoH265ShortTermRefPicSet* pShortTermRefPicSet; + const StdVideoEncodeH265LongTermRefPics* pLongTermRefPics; +} StdVideoEncodeH265PictureInfo; + +typedef struct StdVideoEncodeH265ReferenceInfoFlags { + uint32_t used_for_long_term_reference : 1; + uint32_t unused_for_reference : 1; + uint32_t reserved : 30; +} StdVideoEncodeH265ReferenceInfoFlags; + +typedef struct StdVideoEncodeH265ReferenceInfo { + StdVideoEncodeH265ReferenceInfoFlags flags; + StdVideoH265PictureType pic_type; + int32_t PicOrderCntVal; + uint8_t TemporalId; +} StdVideoEncodeH265ReferenceInfo; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vk_video/vulkan_video_codecs_common.h b/dependencies/vulkan/include/vk_video/vulkan_video_codecs_common.h new file mode 100644 index 000000000000..5e6ef1db48ee --- /dev/null +++ b/dependencies/vulkan/include/vk_video/vulkan_video_codecs_common.h @@ -0,0 +1,36 @@ +#ifndef VULKAN_VIDEO_CODECS_COMMON_H_ +#define VULKAN_VIDEO_CODECS_COMMON_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls. +#define vulkan_video_codecs_common 1 +#if !defined(VK_NO_STDINT_H) + #include +#endif + +#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vk_icd.h b/dependencies/vulkan/include/vulkan/vk_icd.h new file mode 100644 index 000000000000..59204a3419f5 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vk_icd.h @@ -0,0 +1,244 @@ +/* + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +#include "vulkan.h" +#include + +// Loader-ICD version negotiation API. Versions add the following features: +// Version 0 - Initial. Doesn't support vk_icdGetInstanceProcAddr +// or vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 1 - Add support for vk_icdGetInstanceProcAddr. 
+// Version 2 - Add Loader/ICD Interface version negotiation +// via vk_icdNegotiateLoaderICDInterfaceVersion. +// Version 3 - Add ICD creation/destruction of KHR_surface objects. +// Version 4 - Add unknown physical device extension querying via +// vk_icdGetPhysicalDeviceProcAddr. +// Version 5 - Tells ICDs that the loader is now paying attention to the +// application version of Vulkan passed into the ApplicationInfo +// structure during vkCreateInstance. This will tell the ICD +// that if the loader is older, it should automatically fail a +// call for any API version > 1.0. Otherwise, the loader will +// manually determine if it can support the expected version. +// Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices. +// Version 7 - If an ICD supports any of the following functions, they must be +// queryable with vk_icdGetInstanceProcAddr: +// vk_icdNegotiateLoaderICDInterfaceVersion +// vk_icdGetPhysicalDeviceProcAddr +// vk_icdEnumerateAdapterPhysicalDevices (Windows only) +// In addition, these functions no longer need to be exported directly. +// This version allows drivers provided through the extension +// VK_LUNARG_direct_driver_loading be able to support the entire +// Driver-Loader interface. + +#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7 +#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0 +#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4 + +// Old typedefs that don't follow a proper naming convention but are preserved for compatibility +typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion); +// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this +// file directly, it won't be found. +#ifndef PFN_GetPhysicalDeviceProcAddr +typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName); +#endif + +// Typedefs for loader/ICD interface +typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) +typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif + +// Prototypes for loader/ICD interface +#if !defined(VK_NO_PROTOTYPES) +#ifdef __cplusplus +extern "C" { +#endif + VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName); + VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName); +#if defined(VK_USE_PLATFORM_WIN32_KHR) + VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID, + uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +#endif +#ifdef __cplusplus +} +#endif +#endif + +/* + * The ICD must reserve space for a pointer for the loader's dispatch + * table, at the start of . + * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro. 
+ */ + +#define ICD_LOADER_MAGIC 0x01CDC0DE + +typedef union { + uintptr_t loaderMagic; + void *loaderData; +} VK_LOADER_DATA; + +static inline void set_loader_magic_value(void *pNewObject) { + VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + loader_info->loaderMagic = ICD_LOADER_MAGIC; +} + +static inline bool valid_loader_magic_value(void *pNewObject) { + const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject; + return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC; +} + +/* + * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that + * contains the platform-specific connection and surface information. + */ +typedef enum { + VK_ICD_WSI_PLATFORM_MIR, + VK_ICD_WSI_PLATFORM_WAYLAND, + VK_ICD_WSI_PLATFORM_WIN32, + VK_ICD_WSI_PLATFORM_XCB, + VK_ICD_WSI_PLATFORM_XLIB, + VK_ICD_WSI_PLATFORM_ANDROID, + VK_ICD_WSI_PLATFORM_MACOS, + VK_ICD_WSI_PLATFORM_IOS, + VK_ICD_WSI_PLATFORM_DISPLAY, + VK_ICD_WSI_PLATFORM_HEADLESS, + VK_ICD_WSI_PLATFORM_METAL, + VK_ICD_WSI_PLATFORM_DIRECTFB, + VK_ICD_WSI_PLATFORM_VI, + VK_ICD_WSI_PLATFORM_GGP, + VK_ICD_WSI_PLATFORM_SCREEN, + VK_ICD_WSI_PLATFORM_FUCHSIA, +} VkIcdWsiPlatform; + +typedef struct { + VkIcdWsiPlatform platform; +} VkIcdSurfaceBase; + +#ifdef VK_USE_PLATFORM_MIR_KHR +typedef struct { + VkIcdSurfaceBase base; + MirConnection *connection; + MirSurface *mirSurface; +} VkIcdSurfaceMir; +#endif // VK_USE_PLATFORM_MIR_KHR + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +typedef struct { + VkIcdSurfaceBase base; + struct wl_display *display; + struct wl_surface *surface; +} VkIcdSurfaceWayland; +#endif // VK_USE_PLATFORM_WAYLAND_KHR + +#ifdef VK_USE_PLATFORM_WIN32_KHR +typedef struct { + VkIcdSurfaceBase base; + HINSTANCE hinstance; + HWND hwnd; +} VkIcdSurfaceWin32; +#endif // VK_USE_PLATFORM_WIN32_KHR + +#ifdef VK_USE_PLATFORM_XCB_KHR +typedef struct { + VkIcdSurfaceBase base; + xcb_connection_t *connection; + xcb_window_t window; +} VkIcdSurfaceXcb; +#endif // VK_USE_PLATFORM_XCB_KHR + +#ifdef VK_USE_PLATFORM_XLIB_KHR +typedef struct { + VkIcdSurfaceBase base; + Display *dpy; + Window window; +} VkIcdSurfaceXlib; +#endif // VK_USE_PLATFORM_XLIB_KHR + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +typedef struct { + VkIcdSurfaceBase base; + IDirectFB *dfb; + IDirectFBSurface *surface; +} VkIcdSurfaceDirectFB; +#endif // VK_USE_PLATFORM_DIRECTFB_EXT + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +typedef struct { + VkIcdSurfaceBase base; + struct ANativeWindow *window; +} VkIcdSurfaceAndroid; +#endif // VK_USE_PLATFORM_ANDROID_KHR + +#ifdef VK_USE_PLATFORM_MACOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceMacOS; +#endif // VK_USE_PLATFORM_MACOS_MVK + +#ifdef VK_USE_PLATFORM_IOS_MVK +typedef struct { + VkIcdSurfaceBase base; + const void *pView; +} VkIcdSurfaceIOS; +#endif // VK_USE_PLATFORM_IOS_MVK + +#ifdef VK_USE_PLATFORM_GGP +typedef struct { + VkIcdSurfaceBase base; + GgpStreamDescriptor streamDescriptor; +} VkIcdSurfaceGgp; +#endif // VK_USE_PLATFORM_GGP + +typedef struct { + VkIcdSurfaceBase base; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkIcdSurfaceDisplay; + +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceHeadless; + +#ifdef VK_USE_PLATFORM_METAL_EXT +typedef struct { + VkIcdSurfaceBase base; + const CAMetalLayer *pLayer; +} VkIcdSurfaceMetal; +#endif // VK_USE_PLATFORM_METAL_EXT + +#ifdef 
VK_USE_PLATFORM_VI_NN +typedef struct { + VkIcdSurfaceBase base; + void *window; +} VkIcdSurfaceVi; +#endif // VK_USE_PLATFORM_VI_NN + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +typedef struct { + VkIcdSurfaceBase base; + struct _screen_context *context; + struct _screen_window *window; +} VkIcdSurfaceScreen; +#endif // VK_USE_PLATFORM_SCREEN_QNX + +#ifdef VK_USE_PLATFORM_FUCHSIA +typedef struct { + VkIcdSurfaceBase base; +} VkIcdSurfaceImagePipe; +#endif // VK_USE_PLATFORM_FUCHSIA diff --git a/dependencies/vulkan/include/vulkan/vk_layer.h b/dependencies/vulkan/include/vulkan/vk_layer.h new file mode 100644 index 000000000000..19d88fce4bac --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vk_layer.h @@ -0,0 +1,189 @@ +/* + * Copyright 2015-2023 The Khronos Group Inc. + * Copyright 2015-2023 Valve Corporation + * Copyright 2015-2023 LunarG, Inc. + * + * SPDX-License-Identifier: Apache-2.0 + */ +#pragma once + +/* Need to define dispatch table + * Core struct can then have ptr to dispatch table at the top + * Along with object ptrs for current and next OBJ + */ + +#include "vulkan_core.h" + +#define MAX_NUM_UNKNOWN_EXTS 250 + + // Loader-Layer version negotiation API. Versions add the following features: + // Versions 0/1 - Initial. Doesn't support vk_layerGetPhysicalDeviceProcAddr + // or vk_icdNegotiateLoaderLayerInterfaceVersion. + // Version 2 - Add support for vk_layerGetPhysicalDeviceProcAddr and + // vk_icdNegotiateLoaderLayerInterfaceVersion. +#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2 +#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1 + +#define VK_CURRENT_CHAIN_VERSION 1 + +// Typedef for use in the interfaces below +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName); + +// Version negotiation values +typedef enum VkNegotiateLayerStructType { + LAYER_NEGOTIATE_UNINTIALIZED = 0, + LAYER_NEGOTIATE_INTERFACE_STRUCT = 1, +} VkNegotiateLayerStructType; + +// Version negotiation structures +typedef struct VkNegotiateLayerInterface { + VkNegotiateLayerStructType sType; + void *pNext; + uint32_t loaderLayerInterfaceVersion; + PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr; +} VkNegotiateLayerInterface; + +// Version negotiation functions +typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct); + +// Function prototype for unknown physical device extension command +typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device); + +// ------------------------------------------------------------------------------------------------ +// CreateInstance and CreateDevice support structures + +/* Sub type of structure for instance and device loader ext of CreateInfo. 
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + * then VkLayerFunction indicates struct type pointed to by pNext + */ +typedef enum VkLayerFunction_ { + VK_LAYER_LINK_INFO = 0, + VK_LOADER_DATA_CALLBACK = 1, + VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2, + VK_LOADER_FEATURES = 3, +} VkLayerFunction; + +typedef struct VkLayerInstanceLink_ { + struct VkLayerInstanceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr; +} VkLayerInstanceLink; + +/* + * When creating the device chain the loader needs to pass + * down information about it's device structure needed at + * the end of the chain. Passing the data via the + * VkLayerDeviceInfo avoids issues with finding the + * exact instance being used. + */ +typedef struct VkLayerDeviceInfo_ { + void *device_info; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; +} VkLayerDeviceInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device, + void *object); +typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, + const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA); +typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction); + +typedef enum VkLoaderFeastureFlagBits { + VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001, +} VkLoaderFlagBits; +typedef VkFlags VkLoaderFeatureFlags; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerInstanceLink *pLayerInfo; + PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData; + struct { + PFN_vkLayerCreateDevice pfnLayerCreateDevice; + PFN_vkLayerDestroyDevice pfnLayerDestroyDevice; + } layerDevice; + VkLoaderFeatureFlags loaderFeatures; + } u; +} VkLayerInstanceCreateInfo; + +typedef struct VkLayerDeviceLink_ { + struct VkLayerDeviceLink_ *pNext; + PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr; + PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr; +} VkLayerDeviceLink; + +typedef struct { + VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO + const void *pNext; + VkLayerFunction function; + union { + VkLayerDeviceLink *pLayerInfo; + PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData; + } u; +} VkLayerDeviceCreateInfo; + +#ifdef __cplusplus +extern "C" { +#endif + +VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct); + +typedef enum VkChainType { + VK_CHAIN_TYPE_UNKNOWN = 0, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2, + VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3, +} VkChainType; + +typedef struct VkChainHeader { + VkChainType type; + uint32_t version; + uint32_t size; +} VkChainHeader; + +typedef struct VkEnumerateInstanceExtensionPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *, + VkExtensionProperties *); + const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink; + +#if 
defined(__cplusplus) + inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const { + return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceExtensionPropertiesChain; + +typedef struct VkEnumerateInstanceLayerPropertiesChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *); + const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const { + return pfnNextLayer(pNextLink, pPropertyCount, pProperties); + } +#endif +} VkEnumerateInstanceLayerPropertiesChain; + +typedef struct VkEnumerateInstanceVersionChain { + VkChainHeader header; + VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *); + const struct VkEnumerateInstanceVersionChain *pNextLink; + +#if defined(__cplusplus) + inline VkResult CallDown(uint32_t *pApiVersion) const { + return pfnNextLayer(pNextLink, pApiVersion); + } +#endif +} VkEnumerateInstanceVersionChain; + +#ifdef __cplusplus +} +#endif diff --git a/dependencies/vulkan/include/vulkan/vk_platform.h b/dependencies/vulkan/include/vulkan/vk_platform.h new file mode 100644 index 000000000000..0ecd4f647199 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vk_platform.h @@ -0,0 +1,84 @@ +// +// File: vk_platform.h +// +/* +** Copyright 2014-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + + +#ifndef VK_PLATFORM_H_ +#define VK_PLATFORM_H_ + +#ifdef __cplusplus +extern "C" +{ +#endif // __cplusplus + +/* +*************************************************************************************************** +* Platform-specific directives and type declarations +*************************************************************************************************** +*/ + +/* Platform-specific calling convention macros. + * + * Platforms should define these so that Vulkan clients call Vulkan commands + * with the same calling conventions that the Vulkan implementation expects. + * + * VKAPI_ATTR - Placed before the return type in function declarations. + * Useful for C++11 and GCC/Clang-style function attribute syntax. + * VKAPI_CALL - Placed after the return type in function declarations. + * Useful for MSVC-style calling convention syntax. + * VKAPI_PTR - Placed between the '(' and '*' in function pointer types. + * + * Function declaration: VKAPI_ATTR void VKAPI_CALL vkCommand(void); + * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void); + */ +#if defined(_WIN32) + // On Windows, Vulkan commands use the stdcall convention + #define VKAPI_ATTR + #define VKAPI_CALL __stdcall + #define VKAPI_PTR VKAPI_CALL +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 + #error "Vulkan is not supported for the 'armeabi' NDK ABI" +#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) + // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" + // calling convention, i.e. float parameters are passed in registers. This + // is true even if the rest of the application passes floats on the stack, + // as it does by default when compiling for the armeabi-v7a NDK ABI. 
+ #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp"))) + #define VKAPI_CALL + #define VKAPI_PTR VKAPI_ATTR +#else + // On other platforms, use the default calling convention + #define VKAPI_ATTR + #define VKAPI_CALL + #define VKAPI_PTR +#endif + +#if !defined(VK_NO_STDDEF_H) + #include +#endif // !defined(VK_NO_STDDEF_H) + +#if !defined(VK_NO_STDINT_H) + #if defined(_MSC_VER) && (_MSC_VER < 1600) + typedef signed __int8 int8_t; + typedef unsigned __int8 uint8_t; + typedef signed __int16 int16_t; + typedef unsigned __int16 uint16_t; + typedef signed __int32 int32_t; + typedef unsigned __int32 uint32_t; + typedef signed __int64 int64_t; + typedef unsigned __int64 uint64_t; + #else + #include + #endif +#endif // !defined(VK_NO_STDINT_H) + +#ifdef __cplusplus +} // extern "C" +#endif // __cplusplus + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan.h b/dependencies/vulkan/include/vulkan/vulkan.h new file mode 100644 index 000000000000..ef94006bb319 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan.h @@ -0,0 +1,99 @@ +#ifndef VULKAN_H_ +#define VULKAN_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +#include "vk_platform.h" +#include "vulkan_core.h" + +#ifdef VK_USE_PLATFORM_ANDROID_KHR +#include "vulkan_android.h" +#endif + +#ifdef VK_USE_PLATFORM_FUCHSIA +#include +#include "vulkan_fuchsia.h" +#endif + +#ifdef VK_USE_PLATFORM_IOS_MVK +#include "vulkan_ios.h" +#endif + + +#ifdef VK_USE_PLATFORM_MACOS_MVK +#include "vulkan_macos.h" +#endif + +#ifdef VK_USE_PLATFORM_METAL_EXT +#include "vulkan_metal.h" +#endif + +#ifdef VK_USE_PLATFORM_VI_NN +#include "vulkan_vi.h" +#endif + + +#ifdef VK_USE_PLATFORM_WAYLAND_KHR +#include "vulkan_wayland.h" +#endif + + +#ifdef VK_USE_PLATFORM_WIN32_KHR +#include +#include "vulkan_win32.h" +#endif + + +#ifdef VK_USE_PLATFORM_XCB_KHR +#include +#include "vulkan_xcb.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_KHR +#include +#include "vulkan_xlib.h" +#endif + + +#ifdef VK_USE_PLATFORM_DIRECTFB_EXT +#include +#include "vulkan_directfb.h" +#endif + + +#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT +#include +#include +#include "vulkan_xlib_xrandr.h" +#endif + + +#ifdef VK_USE_PLATFORM_GGP +#include +#include "vulkan_ggp.h" +#endif + + +#ifdef VK_USE_PLATFORM_SCREEN_QNX +#include +#include "vulkan_screen.h" +#endif + + +#ifdef VK_USE_PLATFORM_SCI +#include +#include +#include "vulkan_sci.h" +#endif + + +#ifdef VK_ENABLE_BETA_EXTENSIONS +#include "vulkan_beta.h" +#endif + +#endif // VULKAN_H_ diff --git a/dependencies/vulkan/include/vulkan/vulkan_android.h b/dependencies/vulkan/include/vulkan/vulkan_android.h new file mode 100644 index 000000000000..61ff40ba8cd2 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_android.h @@ -0,0 +1,153 @@ +#ifndef VULKAN_ANDROID_H_ +#define VULKAN_ANDROID_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_android_surface 1 +struct ANativeWindow; +#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6 +#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface" +typedef VkFlags VkAndroidSurfaceCreateFlagsKHR; +typedef struct VkAndroidSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAndroidSurfaceCreateFlagsKHR flags; + struct ANativeWindow* window; +} VkAndroidSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR( + VkInstance instance, + const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_ANDROID_external_memory_android_hardware_buffer 1 +struct AHardwareBuffer; +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5 +#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer" +typedef struct VkAndroidHardwareBufferUsageANDROID { + VkStructureType sType; + void* pNext; + uint64_t androidHardwareBufferUsage; +} VkAndroidHardwareBufferUsageANDROID; + +typedef struct VkAndroidHardwareBufferPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkAndroidHardwareBufferPropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatPropertiesANDROID; + +typedef struct VkImportAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + struct AHardwareBuffer* buffer; +} VkImportAndroidHardwareBufferInfoANDROID; + +typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkMemoryGetAndroidHardwareBufferInfoANDROID; + +typedef struct VkExternalFormatANDROID { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatANDROID; + +typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + VkFormatFeatureFlags2 formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkAndroidHardwareBufferFormatProperties2ANDROID; + +typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetAndroidHardwareBufferPropertiesANDROID( + VkDevice device, + const struct AHardwareBuffer* buffer, + VkAndroidHardwareBufferPropertiesANDROID* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID( + VkDevice device, + const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, + struct AHardwareBuffer** pBuffer); +#endif + + +// VK_ANDROID_external_format_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_ANDROID_external_format_resolve 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION 1 +#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME "VK_ANDROID_external_format_resolve" +typedef struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 externalFormatResolve; +} VkPhysicalDeviceExternalFormatResolveFeaturesANDROID; + +typedef struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkBool32 nullColorAttachmentWithExternalFormatResolve; + VkChromaLocation externalFormatResolveChromaOffsetX; + VkChromaLocation externalFormatResolveChromaOffsetY; +} VkPhysicalDeviceExternalFormatResolvePropertiesANDROID; + +typedef struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID { + VkStructureType sType; + void* pNext; + VkFormat colorAttachmentFormat; +} VkAndroidHardwareBufferFormatResolvePropertiesANDROID; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_beta.h b/dependencies/vulkan/include/vulkan/vulkan_beta.h new file mode 100644 index 000000000000..df18b4042b17 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_beta.h @@ -0,0 +1,216 @@ +#ifndef VULKAN_BETA_H_ +#define VULKAN_BETA_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_portability_subset is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_subset 1 +#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset" +typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 constantAlphaColorBlendFactors; + VkBool32 events; + VkBool32 imageViewFormatReinterpretation; + VkBool32 imageViewFormatSwizzle; + VkBool32 imageView2DOn3DImage; + VkBool32 multisampleArrayImage; + VkBool32 mutableComparisonSamplers; + VkBool32 pointPolygons; + VkBool32 samplerMipLodBias; + VkBool32 separateStencilMaskRef; + VkBool32 shaderSampleRateInterpolationFunctions; + VkBool32 tessellationIsolines; + VkBool32 tessellationPointMode; + VkBool32 triangleFans; + VkBool32 vertexAttributeAccessBeyondStride; +} VkPhysicalDevicePortabilitySubsetFeaturesKHR; + +typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t minVertexInputBindingStrideAlignment; +} VkPhysicalDevicePortabilitySubsetPropertiesKHR; + + + +// VK_AMDX_shader_enqueue is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMDX_shader_enqueue 1 +#define VK_AMDX_SHADER_ENQUEUE_SPEC_VERSION 1 +#define VK_AMDX_SHADER_ENQUEUE_EXTENSION_NAME "VK_AMDX_shader_enqueue" +#define VK_SHADER_INDEX_UNUSED_AMDX (~0U) +typedef struct VkPhysicalDeviceShaderEnqueueFeaturesAMDX { + VkStructureType sType; + void* pNext; + VkBool32 shaderEnqueue; +} VkPhysicalDeviceShaderEnqueueFeaturesAMDX; + +typedef struct VkPhysicalDeviceShaderEnqueuePropertiesAMDX { + VkStructureType sType; + void* pNext; + uint32_t maxExecutionGraphDepth; + uint32_t maxExecutionGraphShaderOutputNodes; + uint32_t maxExecutionGraphShaderPayloadSize; + uint32_t maxExecutionGraphShaderPayloadCount; + uint32_t executionGraphDispatchAddressAlignment; +} VkPhysicalDeviceShaderEnqueuePropertiesAMDX; + +typedef struct VkExecutionGraphPipelineScratchSizeAMDX { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkExecutionGraphPipelineScratchSizeAMDX; + +typedef struct VkExecutionGraphPipelineCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkExecutionGraphPipelineCreateInfoAMDX; + +typedef union VkDeviceOrHostAddressConstAMDX { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstAMDX; + +typedef struct VkDispatchGraphInfoAMDX { + uint32_t nodeIndex; + uint32_t payloadCount; + VkDeviceOrHostAddressConstAMDX payloads; + uint64_t payloadStride; +} VkDispatchGraphInfoAMDX; + +typedef struct VkDispatchGraphCountInfoAMDX { + uint32_t count; + VkDeviceOrHostAddressConstAMDX infos; + uint64_t stride; +} VkDispatchGraphCountInfoAMDX; + +typedef struct VkPipelineShaderStageNodeCreateInfoAMDX { + VkStructureType sType; + const void* pNext; + const char* pName; + uint32_t index; +} VkPipelineShaderStageNodeCreateInfoAMDX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateExecutionGraphPipelinesAMDX)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineScratchSizeAMDX)(VkDevice device, VkPipeline executionGraph, VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetExecutionGraphPipelineNodeIndexAMDX)(VkDevice device, VkPipeline executionGraph, const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, uint32_t* pNodeIndex); +typedef void (VKAPI_PTR *PFN_vkCmdInitializeGraphScratchMemoryAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, const VkDispatchGraphCountInfoAMDX* pCountInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchGraphIndirectCountAMDX)(VkCommandBuffer commandBuffer, VkDeviceAddress scratch, VkDeviceAddress countInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateExecutionGraphPipelinesAMDX( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkExecutionGraphPipelineCreateInfoAMDX* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + 
+VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineScratchSizeAMDX( + VkDevice device, + VkPipeline executionGraph, + VkExecutionGraphPipelineScratchSizeAMDX* pSizeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetExecutionGraphPipelineNodeIndexAMDX( + VkDevice device, + VkPipeline executionGraph, + const VkPipelineShaderStageNodeCreateInfoAMDX* pNodeInfo, + uint32_t* pNodeIndex); + +VKAPI_ATTR void VKAPI_CALL vkCmdInitializeGraphScratchMemoryAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + const VkDispatchGraphCountInfoAMDX* pCountInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchGraphIndirectCountAMDX( + VkCommandBuffer commandBuffer, + VkDeviceAddress scratch, + VkDeviceAddress countInfo); +#endif + + +// VK_NV_displacement_micromap is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_displacement_micromap 1 +#define VK_NV_DISPLACEMENT_MICROMAP_SPEC_VERSION 2 +#define VK_NV_DISPLACEMENT_MICROMAP_EXTENSION_NAME "VK_NV_displacement_micromap" + +typedef enum VkDisplacementMicromapFormatNV { + VK_DISPLACEMENT_MICROMAP_FORMAT_64_TRIANGLES_64_BYTES_NV = 1, + VK_DISPLACEMENT_MICROMAP_FORMAT_256_TRIANGLES_128_BYTES_NV = 2, + VK_DISPLACEMENT_MICROMAP_FORMAT_1024_TRIANGLES_128_BYTES_NV = 3, + VK_DISPLACEMENT_MICROMAP_FORMAT_MAX_ENUM_NV = 0x7FFFFFFF +} VkDisplacementMicromapFormatNV; +typedef struct VkPhysicalDeviceDisplacementMicromapFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 displacementMicromap; +} VkPhysicalDeviceDisplacementMicromapFeaturesNV; + +typedef struct VkPhysicalDeviceDisplacementMicromapPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDisplacementMicromapSubdivisionLevel; +} VkPhysicalDeviceDisplacementMicromapPropertiesNV; + +typedef struct VkAccelerationStructureTrianglesDisplacementMicromapNV { + VkStructureType sType; + void* pNext; + VkFormat displacementBiasAndScaleFormat; + VkFormat displacementVectorFormat; + VkDeviceOrHostAddressConstKHR displacementBiasAndScaleBuffer; + VkDeviceSize displacementBiasAndScaleStride; + VkDeviceOrHostAddressConstKHR displacementVectorBuffer; + VkDeviceSize displacementVectorStride; + VkDeviceOrHostAddressConstKHR displacedMicromapPrimitiveFlags; + VkDeviceSize displacedMicromapPrimitiveFlagsStride; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesDisplacementMicromapNV; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_core.h b/dependencies/vulkan/include/vulkan/vulkan_core.h new file mode 100644 index 000000000000..e6c16498af53 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_core.h @@ -0,0 +1,20392 @@ +#ifndef VULKAN_CORE_H_ +#define VULKAN_CORE_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_VERSION_1_0 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_0 1 +#include "vk_platform.h" + +#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object; + + +#ifndef VK_USE_64_BIT_PTR_DEFINES + #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) || (defined(__riscv) && __riscv_xlen == 64) + #define VK_USE_64_BIT_PTR_DEFINES 1 + #else + #define VK_USE_64_BIT_PTR_DEFINES 0 + #endif +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L)) + #define VK_NULL_HANDLE nullptr + #else + #define VK_NULL_HANDLE ((void*)0) + #endif + #else + #define VK_NULL_HANDLE 0ULL + #endif +#endif +#ifndef VK_NULL_HANDLE + #define VK_NULL_HANDLE 0 +#endif + + +#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE + #if (VK_USE_64_BIT_PTR_DEFINES==1) + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object; + #else + #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; + #endif +#endif + +#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ + ((((uint32_t)(variant)) << 29U) | (((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead. +//#define VK_API_VERSION VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 + +// Vulkan 1.0 version number +#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0 + +// Version of this file +#define VK_HEADER_VERSION 296 + +// Complete version of this file +#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION) + +// VK_MAKE_VERSION is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead. +#define VK_MAKE_VERSION(major, minor, patch) \ + ((((uint32_t)(major)) << 22U) | (((uint32_t)(minor)) << 12U) | ((uint32_t)(patch))) + +// VK_VERSION_MAJOR is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead. +#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22U) + +// VK_VERSION_MINOR is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead. +#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) + +// VK_VERSION_PATCH is deprecated, but no reason was given in the API XML +// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead. 
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) + +#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29U) +#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22U) & 0x7FU) +#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12U) & 0x3FFU) +#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) +typedef uint32_t VkBool32; +typedef uint64_t VkDeviceAddress; +typedef uint64_t VkDeviceSize; +typedef uint32_t VkFlags; +typedef uint32_t VkSampleMask; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) +VK_DEFINE_HANDLE(VkInstance) +VK_DEFINE_HANDLE(VkPhysicalDevice) +VK_DEFINE_HANDLE(VkDevice) +VK_DEFINE_HANDLE(VkQueue) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) +VK_DEFINE_HANDLE(VkCommandBuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) +#define VK_ATTACHMENT_UNUSED (~0U) +#define VK_FALSE 0U +#define VK_LOD_CLAMP_NONE 1000.0F +#define VK_QUEUE_FAMILY_IGNORED (~0U) +#define VK_REMAINING_ARRAY_LAYERS (~0U) +#define VK_REMAINING_MIP_LEVELS (~0U) +#define VK_SUBPASS_EXTERNAL (~0U) +#define VK_TRUE 1U +#define VK_WHOLE_SIZE (~0ULL) +#define VK_MAX_MEMORY_TYPES 32U +#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256U +#define VK_UUID_SIZE 16U +#define VK_MAX_EXTENSION_NAME_SIZE 256U +#define VK_MAX_DESCRIPTION_SIZE 256U +#define VK_MAX_MEMORY_HEAPS 16U + +typedef enum VkResult { + VK_SUCCESS = 0, + VK_NOT_READY = 1, + VK_TIMEOUT = 2, + VK_EVENT_SET = 3, + VK_EVENT_RESET = 4, + VK_INCOMPLETE = 5, + VK_ERROR_OUT_OF_HOST_MEMORY = -1, + VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, + VK_ERROR_INITIALIZATION_FAILED = -3, + VK_ERROR_DEVICE_LOST = -4, + VK_ERROR_MEMORY_MAP_FAILED = -5, + VK_ERROR_LAYER_NOT_PRESENT = -6, + VK_ERROR_EXTENSION_NOT_PRESENT = -7, + VK_ERROR_FEATURE_NOT_PRESENT = -8, + VK_ERROR_INCOMPATIBLE_DRIVER = -9, + VK_ERROR_TOO_MANY_OBJECTS = -10, + VK_ERROR_FORMAT_NOT_SUPPORTED = -11, + VK_ERROR_FRAGMENTED_POOL = -12, + VK_ERROR_UNKNOWN = -13, + VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000, + VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003, + VK_ERROR_FRAGMENTATION = -1000161000, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000, + VK_PIPELINE_COMPILE_REQUIRED = 1000297000, + VK_ERROR_SURFACE_LOST_KHR = -1000000000, + VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, + VK_SUBOPTIMAL_KHR = 1000001003, + VK_ERROR_OUT_OF_DATE_KHR = -1000001004, + VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, + VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, + VK_ERROR_INVALID_SHADER_NV = -1000012000, + VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000, + VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001, + VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002, + 
VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003, + VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004, + VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005, + VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000, + VK_ERROR_NOT_PERMITTED_KHR = -1000174001, + VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000, + VK_THREAD_IDLE_KHR = 1000268000, + VK_THREAD_DONE_KHR = 1000268001, + VK_OPERATION_DEFERRED_KHR = 1000268002, + VK_OPERATION_NOT_DEFERRED_KHR = 1000268003, + VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR = -1000299000, + VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000, + VK_INCOMPATIBLE_SHADER_BINARY_EXT = 1000482000, + VK_PIPELINE_BINARY_MISSING_KHR = 1000483000, + VK_ERROR_NOT_ENOUGH_SPACE_KHR = -1000483000, + VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY, + VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE, + VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION, + VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR, + VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS, + VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED, + // VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT is a deprecated alias + VK_ERROR_INCOMPATIBLE_SHADER_BINARY_EXT = VK_INCOMPATIBLE_SHADER_BINARY_EXT, + VK_RESULT_MAX_ENUM = 0x7FFFFFFF +} VkResult; + +typedef enum VkStructureType { + VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, + VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, + VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, + VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, + VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, + VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, + VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, + VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, + VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, + VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, + VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, + VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, + VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, + VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, + VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, + VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, + VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, + VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, + VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, + VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, + VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, + 
VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, + VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, + VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, + VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, + VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, + VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000, + VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 
1000156002, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005, + VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006, + VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES 
= 1000225000, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000, + VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003, + VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, + VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007, + VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009, + VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010, + VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012, + VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, + VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, + VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, + VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, + VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, + VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, + VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, + VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, + VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, + VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, + VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000, + VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001, + VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002, + VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003, + VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004, + VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006, + VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007, + VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008, + VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 
1000023009, + VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010, + VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012, + VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014, + VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016, + VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, + VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002, + VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000, + VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001, + VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002, + VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_KHR = 1000038000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000038001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000038002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PICTURE_INFO_KHR = 1000038003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_KHR = 1000038004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_KHR = 1000038005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_GOP_REMAINING_FRAME_INFO_KHR = 1000038006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_KHR = 1000038007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_KHR = 1000038008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_KHR = 1000038009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_CREATE_INFO_KHR = 1000038010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_QUALITY_LEVEL_PROPERTIES_KHR = 1000038011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_GET_INFO_KHR = 1000038012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000038013, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_KHR = 1000039000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000039001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000039002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PICTURE_INFO_KHR = 1000039003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_KHR = 1000039004, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_KHR = 1000039005, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_GOP_REMAINING_FRAME_INFO_KHR = 1000039006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_KHR = 1000039007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_KHR = 1000039009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_KHR = 1000039010, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_CREATE_INFO_KHR = 1000039011, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_QUALITY_LEVEL_PROPERTIES_KHR = 1000039012, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_GET_INFO_KHR = 1000039013, + 
VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000039014, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR = 1000040003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006, + VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006, + VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009, + VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, + VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, + VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001, + VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, + VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, + VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, + VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, + VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, + VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, + VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001, + VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002, + VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, + VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, + VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, + VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, + 
VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, + VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, + VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001, + VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RELAXED_LINE_RASTERIZATION_FEATURES_IMG = 1000110000, + VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, + VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, + VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, + VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, + VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, + VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002, + VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003, + VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005, + VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, + VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, + VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001, + VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003, + VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004, + VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, + VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000, + VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001, + VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003, + VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002, + VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003, + VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_FEATURES_AMDX = 1000134000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ENQUEUE_PROPERTIES_AMDX = 1000134001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_SCRATCH_SIZE_AMDX = 1000134002, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_EXECUTION_GRAPH_PIPELINE_CREATE_INFO_AMDX = 1000134003, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_NODE_CREATE_INFO_AMDX = 1000134004, +#endif + VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, + VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, + VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, + VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010, + VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004, + VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005, + VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006, + 
VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, + VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001, +#endif + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005, + VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001, + VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003, + VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004, + VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005, + VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009, + VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000, + VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000, + VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000, + VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000, + VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000, + VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000, + VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000, + VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001, + VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002, + VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003, + VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004, + VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000, + VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000, + VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001, + VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000, + VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001, + VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002, + VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_LOCAL_READ_FEATURES_KHR = 1000232000, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_LOCATION_INFO_KHR = 1000232001, + VK_STRUCTURE_TYPE_RENDERING_INPUT_ATTACHMENT_INDEX_INFO_KHR = 1000232002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_QUAD_CONTROL_FEATURES_KHR = 1000235000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000, + VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001, + VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002, + VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000, + VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001, + VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002, + VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001, + VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000, + VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004, + VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT = 1000270000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_PROPERTIES_EXT = 1000270001, + VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT = 1000270002, + VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT = 1000270003, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT = 1000270004, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT = 1000270005, + VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT = 1000270006, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_IMAGE_INFO_EXT = 1000270007, + VK_STRUCTURE_TYPE_SUBRESOURCE_HOST_MEMCPY_SIZE_EXT = 1000270008, + VK_STRUCTURE_TYPE_HOST_IMAGE_COPY_DEVICE_PERFORMANCE_QUERY_EXT = 1000270009, + VK_STRUCTURE_TYPE_MEMORY_MAP_INFO_KHR = 1000271000, + VK_STRUCTURE_TYPE_MEMORY_UNMAP_INFO_KHR = 1000271001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_FEATURES_EXT = 1000272000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAP_MEMORY_PLACED_PROPERTIES_EXT = 1000272001, + VK_STRUCTURE_TYPE_MEMORY_MAP_PLACED_INFO_EXT = 1000272002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001, + VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004, + VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000, + VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005, + 
VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000, + VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_BIAS_CONTROL_FEATURES_EXT = 1000283000, + VK_STRUCTURE_TYPE_DEPTH_BIAS_INFO_EXT = 1000283001, + VK_STRUCTURE_TYPE_DEPTH_BIAS_REPRESENTATION_INFO_EXT = 1000283002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000, + VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001, + VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002, + VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000, + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001, + VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002, + VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004, + VK_STRUCTURE_TYPE_QUERY_POOL_VIDEO_ENCODE_FEEDBACK_CREATE_INFO_KHR = 1000299005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299006, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_PROPERTIES_KHR = 1000299007, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_QUALITY_LEVEL_INFO_KHR = 1000299008, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_GET_INFO_KHR = 1000299009, + VK_STRUCTURE_TYPE_VIDEO_ENCODE_SESSION_PARAMETERS_FEEDBACK_INFO_KHR = 1000299010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000, + VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001, + VK_STRUCTURE_TYPE_CUDA_MODULE_CREATE_INFO_NV = 1000307000, + VK_STRUCTURE_TYPE_CUDA_FUNCTION_CREATE_INFO_NV = 1000307001, + VK_STRUCTURE_TYPE_CUDA_LAUNCH_INFO_NV = 1000307002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_FEATURES_NV = 1000307003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUDA_KERNEL_LAUNCH_PROPERTIES_NV = 1000307004, + VK_STRUCTURE_TYPE_QUERY_LOW_LATENCY_SUPPORT_NV = 1000310000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000, + VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001, + VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002, + VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003, + VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004, + 
VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005, + VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006, + VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007, + VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008, + VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009, + VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010, + VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008, + VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002, + VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003, + VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004, + VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316005, + VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006, + VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007, + VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008, + VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011, + VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001, + VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001, + VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001, + VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001, + VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 
1000338004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000, + VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001, + VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000, + VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000, + VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001, + VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000, + VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000, + VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001, + VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002, + VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000, + VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000, + VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003, + VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005, + VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007, + VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008, + VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009, + VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000, + VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001, + VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAME_BOUNDARY_FEATURES_EXT = 1000375000, + VK_STRUCTURE_TYPE_FRAME_BOUNDARY_EXT = 1000375001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000, + VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001, + VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000, + VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000, + VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_FEATURES_EXT = 1000395000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TILE_IMAGE_PROPERTIES_EXT = 1000395001, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000, + VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001, + VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002, + VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003, + VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006, + VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007, + VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008, + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_FEATURES_NV = 1000397000, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISPLACEMENT_MICROMAP_PROPERTIES_NV = 1000397001, +#endif +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_DISPLACEMENT_MICROMAP_NV = 1000397002, +#endif + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_FEATURES_HUAWEI = 1000404000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_PROPERTIES_HUAWEI = 1000404001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CLUSTER_CULLING_SHADER_VRS_FEATURES_HUAWEI = 1000404002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000, + VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_ARM = 1000415000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR = 1000416000, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_SHADER_CORE_CONTROL_CREATE_INFO_ARM = 1000417000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_FEATURES_ARM = 1000417001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_PROPERTIES_ARM = 1000417002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_SLICED_VIEW_OF_3D_FEATURES_EXT = 1000418000, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SLICED_CREATE_INFO_EXT = 1000418001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_FEATURES_ARM = 1000424000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RENDER_PASS_STRIPED_PROPERTIES_ARM = 1000424001, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_BEGIN_INFO_ARM = 1000424002, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_INFO_ARM = 1000424003, + VK_STRUCTURE_TYPE_RENDER_PASS_STRIPE_SUBMIT_INFO_ARM = 1000424004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001, + VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_COMPUTE_FEATURES_NV = 1000428000, + VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_INDIRECT_BUFFER_INFO_NV = 1000428001, + VK_STRUCTURE_TYPE_PIPELINE_INDIRECT_DEVICE_ADDRESS_INFO_NV = 1000428002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MAXIMAL_RECONVERGENCE_FEATURES_KHR = 1000434000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001, + VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_FEATURES_EXT = 1000451000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NESTED_COMMAND_BUFFER_PROPERTIES_EXT = 1000451001, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXT = 1000453000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002, + VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000, + VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002, + VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005, + VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID = 1000468000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468001, + VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_RESOLVE_PROPERTIES_ANDROID = 1000468002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_FEATURES_KHR = 1000470000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_5_PROPERTIES_KHR = 1000470001, + VK_STRUCTURE_TYPE_RENDERING_AREA_INFO_KHR = 1000470003, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_SUBRESOURCE_INFO_KHR = 1000470004, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR = 1000338002, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR = 1000338003, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_FLAGS_2_CREATE_INFO_KHR = 1000470005, + VK_STRUCTURE_TYPE_BUFFER_USAGE_FLAGS_2_CREATE_INFO_KHR = 1000470006, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ANTI_LAG_FEATURES_AMD = 1000476000, + VK_STRUCTURE_TYPE_ANTI_LAG_DATA_AMD = 1000476001, + VK_STRUCTURE_TYPE_ANTI_LAG_PRESENTATION_INFO_AMD = 1000476002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_POSITION_FETCH_FEATURES_KHR = 1000481000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT = 1000482000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_PROPERTIES_EXT = 1000482001, + VK_STRUCTURE_TYPE_SHADER_CREATE_INFO_EXT = 1000482002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_FEATURES_KHR = 1000483000, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_CREATE_INFO_KHR = 1000483001, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_INFO_KHR = 1000483002, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_KEY_KHR = 1000483003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_BINARY_PROPERTIES_KHR = 1000483004, + VK_STRUCTURE_TYPE_RELEASE_CAPTURED_PIPELINE_DATA_INFO_KHR = 1000483005, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_DATA_INFO_KHR = 1000483006, + VK_STRUCTURE_TYPE_PIPELINE_CREATE_INFO_KHR = 1000483007, + VK_STRUCTURE_TYPE_DEVICE_PIPELINE_BINARY_INTERNAL_CACHE_CONTROL_KHR = 1000483008, + VK_STRUCTURE_TYPE_PIPELINE_BINARY_HANDLES_INFO_KHR = 1000483009, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000, + VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000, + VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_FEATURES_NV = 1000492000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_SPARSE_ADDRESS_SPACE_PROPERTIES_NV = 1000492001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_FEATURES_EXT = 1000495000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_VERTEX_ATTRIBUTES_PROPERTIES_EXT = 1000495001, + VK_STRUCTURE_TYPE_LAYER_SETTINGS_CREATE_INFO_EXT = 1000496000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_LIBRARY_GROUP_HANDLES_FEATURES_EXT = 1000498000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_FEATURES_EXT = 1000499000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_MODE_INFO_NV = 1000505000, + VK_STRUCTURE_TYPE_LATENCY_SLEEP_INFO_NV = 1000505001, + VK_STRUCTURE_TYPE_SET_LATENCY_MARKER_INFO_NV = 1000505002, + VK_STRUCTURE_TYPE_GET_LATENCY_MARKER_INFO_NV = 1000505003, + VK_STRUCTURE_TYPE_LATENCY_TIMINGS_FRAME_REPORT_NV = 1000505004, + VK_STRUCTURE_TYPE_LATENCY_SUBMISSION_PRESENT_ID_NV = 1000505005, + VK_STRUCTURE_TYPE_OUT_OF_BAND_QUEUE_TYPE_INFO_NV = 1000505006, + VK_STRUCTURE_TYPE_SWAPCHAIN_LATENCY_CREATE_INFO_NV = 1000505007, + VK_STRUCTURE_TYPE_LATENCY_SURFACE_CAPABILITIES_NV = 1000505008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR = 1000506000, + VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_KHR = 1000506002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_RENDER_AREAS_FEATURES_QCOM = 1000510000, + VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_RENDER_AREAS_RENDER_PASS_BEGIN_INFO_QCOM = 1000510001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR = 1000201000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_PROPERTIES_KHR = 1000511000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_CAPABILITIES_KHR = 1000512000, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PICTURE_INFO_KHR = 1000512001, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_PROFILE_INFO_KHR = 1000512003, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000512004, + VK_STRUCTURE_TYPE_VIDEO_DECODE_AV1_DPB_SLOT_INFO_KHR = 1000512005, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR = 1000515000, + VK_STRUCTURE_TYPE_VIDEO_INLINE_QUERY_INFO_KHR = 1000515001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PER_STAGE_DESCRIPTOR_SET_FEATURES_NV = 1000516000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_FEATURES_QCOM = 1000518000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_2_PROPERTIES_QCOM = 1000518001, + VK_STRUCTURE_TYPE_SAMPLER_BLOCK_MATCH_WINDOW_CREATE_INFO_QCOM = 1000518002, + VK_STRUCTURE_TYPE_SAMPLER_CUBIC_WEIGHTS_CREATE_INFO_QCOM = 1000519000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_WEIGHTS_FEATURES_QCOM = 1000519001, + VK_STRUCTURE_TYPE_BLIT_IMAGE_CUBIC_WEIGHTS_INFO_QCOM = 1000519002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_DEGAMMA_FEATURES_QCOM = 1000520000, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_YCBCR_DEGAMMA_CREATE_INFO_QCOM = 1000520001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUBIC_CLAMP_FEATURES_QCOM = 1000521000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_FEATURES_EXT = 1000524000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_KHR = 1000525000, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR = 1000190001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR = 1000190002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT_CONTROLS_2_FEATURES_KHR = 1000528000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX = 1000529000, + VK_STRUCTURE_TYPE_SCREEN_BUFFER_FORMAT_PROPERTIES_QNX = 1000529001, + VK_STRUCTURE_TYPE_IMPORT_SCREEN_BUFFER_INFO_QNX = 1000529002, + VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_QNX = 1000529003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_SCREEN_BUFFER_FEATURES_QNX = 1000529004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_DRIVER_PROPERTIES_MSFT = 1000530000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR = 1000265000, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR = 1000259000, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR = 1000259001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR = 1000259002, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR = 1000184000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR = 1000544000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_FEATURES_KHR = 1000545000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_6_PROPERTIES_KHR = 1000545001, + VK_STRUCTURE_TYPE_BIND_MEMORY_STATUS_KHR = 1000545002, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_SETS_INFO_KHR = 1000545003, + VK_STRUCTURE_TYPE_PUSH_CONSTANTS_INFO_KHR = 1000545004, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_INFO_KHR = 1000545005, + VK_STRUCTURE_TYPE_PUSH_DESCRIPTOR_SET_WITH_TEMPLATE_INFO_KHR = 1000545006, + VK_STRUCTURE_TYPE_SET_DESCRIPTOR_BUFFER_OFFSETS_INFO_EXT = 1000545007, + VK_STRUCTURE_TYPE_BIND_DESCRIPTOR_BUFFER_EMBEDDED_SAMPLERS_INFO_EXT = 1000545008, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_POOL_OVERALLOCATION_FEATURES_NV = 1000546000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAW_ACCESS_CHAINS_FEATURES_NV = 1000555000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR = 1000558000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMMAND_BUFFER_INHERITANCE_FEATURES_NV = 1000559000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_FEATURES_KHR = 1000562000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_7_PROPERTIES_KHR = 1000562001, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_LIST_KHR = 1000562002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_PROPERTIES_KHR = 1000562003, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LAYERED_API_VULKAN_PROPERTIES_KHR = 1000562004, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT16_VECTOR_FEATURES_NV = 1000563000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT = 1000564000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_VALIDATION_FEATURES_NV = 1000568000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_EXT = 1000572000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_EXT = 1000572001, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_EXT = 1000572002, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_CREATE_INFO_EXT = 1000572003, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_EXT = 1000572004, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_EXT = 1000572006, + VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_EXT = 1000572007, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_PIPELINE_EXT = 1000572008, + VK_STRUCTURE_TYPE_WRITE_INDIRECT_EXECUTION_SET_SHADER_EXT = 1000572009, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_PIPELINE_INFO_EXT = 1000572010, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_INFO_EXT = 1000572011, + VK_STRUCTURE_TYPE_INDIRECT_EXECUTION_SET_SHADER_LAYOUT_INFO_EXT = 1000572012, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_PIPELINE_INFO_EXT = 1000572013, + VK_STRUCTURE_TYPE_GENERATED_COMMANDS_SHADER_INFO_EXT = 1000572014, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_FEATURES_MESA = 1000575000, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ALIGNMENT_CONTROL_PROPERTIES_MESA = 1000575001, + VK_STRUCTURE_TYPE_IMAGE_ALIGNMENT_CONTROL_CREATE_INFO_MESA = 1000575002, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_CONTROL_FEATURES_EXT = 1000582000, + VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLAMP_CONTROL_CREATE_INFO_EXT = 1000582001, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES, + // VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT is a deprecated alias + VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO, + VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO, + VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD, + VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO, + VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO, + 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO, + // VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT is a deprecated alias + VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO, + VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO, + VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2, + VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2, + VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2, + VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO, + VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO, + VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES, + VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES, + VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = 
VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO, + VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS, + VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES, + VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES, + VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK, + VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2, + VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2, + VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO, + VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES, + VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES, + VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO, + VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES, + VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT, + VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES, + VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_KHR, + VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_KHR, + VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES, + VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES, + VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO, + VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO, + VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO, + // VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL is a deprecated alias + VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES, + VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES, + VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES, + VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES, + VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO, + VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO, + VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_KHR, + VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = 
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES, + VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO, + VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES, + VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2, + VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO, + VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2, + VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO, + VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES, + VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2, + VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2, + VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2, + VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2, + VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2, + VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_KHR, + VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT, + VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, + VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3, + VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR, + VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = 
VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES, + VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES, + VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS, + VK_STRUCTURE_TYPE_SHADER_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO, + VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkStructureType; + +typedef enum VkPipelineCacheHeaderVersion { + VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, + VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheHeaderVersion; + +typedef enum VkImageLayout { + VK_IMAGE_LAYOUT_UNDEFINED = 0, + VK_IMAGE_LAYOUT_GENERAL = 1, + VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, + VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, + VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, + VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, + VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, + VK_IMAGE_LAYOUT_PREINITIALIZED = 8, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001, + VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, + VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000, + VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001, + VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002, + VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, + VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000, + VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003, + VK_IMAGE_LAYOUT_RENDERING_LOCAL_READ_KHR = 1000232000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001, + VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002, + VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR, + VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL, + VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, + VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF +} VkImageLayout; + +typedef enum VkObjectType { + VK_OBJECT_TYPE_UNKNOWN = 0, + VK_OBJECT_TYPE_INSTANCE = 1, + 
VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, + VK_OBJECT_TYPE_DEVICE = 3, + VK_OBJECT_TYPE_QUEUE = 4, + VK_OBJECT_TYPE_SEMAPHORE = 5, + VK_OBJECT_TYPE_COMMAND_BUFFER = 6, + VK_OBJECT_TYPE_FENCE = 7, + VK_OBJECT_TYPE_DEVICE_MEMORY = 8, + VK_OBJECT_TYPE_BUFFER = 9, + VK_OBJECT_TYPE_IMAGE = 10, + VK_OBJECT_TYPE_EVENT = 11, + VK_OBJECT_TYPE_QUERY_POOL = 12, + VK_OBJECT_TYPE_BUFFER_VIEW = 13, + VK_OBJECT_TYPE_IMAGE_VIEW = 14, + VK_OBJECT_TYPE_SHADER_MODULE = 15, + VK_OBJECT_TYPE_PIPELINE_CACHE = 16, + VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, + VK_OBJECT_TYPE_RENDER_PASS = 18, + VK_OBJECT_TYPE_PIPELINE = 19, + VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, + VK_OBJECT_TYPE_SAMPLER = 21, + VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, + VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, + VK_OBJECT_TYPE_FRAMEBUFFER = 24, + VK_OBJECT_TYPE_COMMAND_POOL = 25, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000, + VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, + VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, + VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, + VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, + VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, + VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000, + VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001, + VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000, + VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001, + VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, + VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000, + VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000, + VK_OBJECT_TYPE_CUDA_MODULE_NV = 1000307000, + VK_OBJECT_TYPE_CUDA_FUNCTION_NV = 1000307001, + VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000, + VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000, + VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000, + VK_OBJECT_TYPE_SHADER_EXT = 1000482000, + VK_OBJECT_TYPE_PIPELINE_BINARY_KHR = 1000483000, + VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_EXT = 1000572000, + VK_OBJECT_TYPE_INDIRECT_EXECUTION_SET_EXT = 1000572001, + VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, + VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION, + VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT, + VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkObjectType; + +typedef enum VkVendorId { + VK_VENDOR_ID_KHRONOS = 0x10000, + VK_VENDOR_ID_VIV = 0x10001, + VK_VENDOR_ID_VSI = 0x10002, + VK_VENDOR_ID_KAZAN = 0x10003, + VK_VENDOR_ID_CODEPLAY = 0x10004, + VK_VENDOR_ID_MESA = 0x10005, + VK_VENDOR_ID_POCL = 0x10006, + VK_VENDOR_ID_MOBILEYE = 0x10007, + VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF +} VkVendorId; + +typedef enum VkSystemAllocationScope { + VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, + VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, + VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, + VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, + VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, + VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF +} VkSystemAllocationScope; + +typedef enum VkInternalAllocationType { + VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, + VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkInternalAllocationType; + +typedef enum VkFormat { + VK_FORMAT_UNDEFINED = 0, + VK_FORMAT_R4G4_UNORM_PACK8 = 1, + VK_FORMAT_R4G4B4A4_UNORM_PACK16 
= 2, + VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, + VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, + VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, + VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, + VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, + VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, + VK_FORMAT_R8_UNORM = 9, + VK_FORMAT_R8_SNORM = 10, + VK_FORMAT_R8_USCALED = 11, + VK_FORMAT_R8_SSCALED = 12, + VK_FORMAT_R8_UINT = 13, + VK_FORMAT_R8_SINT = 14, + VK_FORMAT_R8_SRGB = 15, + VK_FORMAT_R8G8_UNORM = 16, + VK_FORMAT_R8G8_SNORM = 17, + VK_FORMAT_R8G8_USCALED = 18, + VK_FORMAT_R8G8_SSCALED = 19, + VK_FORMAT_R8G8_UINT = 20, + VK_FORMAT_R8G8_SINT = 21, + VK_FORMAT_R8G8_SRGB = 22, + VK_FORMAT_R8G8B8_UNORM = 23, + VK_FORMAT_R8G8B8_SNORM = 24, + VK_FORMAT_R8G8B8_USCALED = 25, + VK_FORMAT_R8G8B8_SSCALED = 26, + VK_FORMAT_R8G8B8_UINT = 27, + VK_FORMAT_R8G8B8_SINT = 28, + VK_FORMAT_R8G8B8_SRGB = 29, + VK_FORMAT_B8G8R8_UNORM = 30, + VK_FORMAT_B8G8R8_SNORM = 31, + VK_FORMAT_B8G8R8_USCALED = 32, + VK_FORMAT_B8G8R8_SSCALED = 33, + VK_FORMAT_B8G8R8_UINT = 34, + VK_FORMAT_B8G8R8_SINT = 35, + VK_FORMAT_B8G8R8_SRGB = 36, + VK_FORMAT_R8G8B8A8_UNORM = 37, + VK_FORMAT_R8G8B8A8_SNORM = 38, + VK_FORMAT_R8G8B8A8_USCALED = 39, + VK_FORMAT_R8G8B8A8_SSCALED = 40, + VK_FORMAT_R8G8B8A8_UINT = 41, + VK_FORMAT_R8G8B8A8_SINT = 42, + VK_FORMAT_R8G8B8A8_SRGB = 43, + VK_FORMAT_B8G8R8A8_UNORM = 44, + VK_FORMAT_B8G8R8A8_SNORM = 45, + VK_FORMAT_B8G8R8A8_USCALED = 46, + VK_FORMAT_B8G8R8A8_SSCALED = 47, + VK_FORMAT_B8G8R8A8_UINT = 48, + VK_FORMAT_B8G8R8A8_SINT = 49, + VK_FORMAT_B8G8R8A8_SRGB = 50, + VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, + VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, + VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, + VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, + VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, + VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, + VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, + VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, + VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, + VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, + VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, + VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62, + VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, + VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, + VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, + VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, + VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, + VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, + VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, + VK_FORMAT_R16_UNORM = 70, + VK_FORMAT_R16_SNORM = 71, + VK_FORMAT_R16_USCALED = 72, + VK_FORMAT_R16_SSCALED = 73, + VK_FORMAT_R16_UINT = 74, + VK_FORMAT_R16_SINT = 75, + VK_FORMAT_R16_SFLOAT = 76, + VK_FORMAT_R16G16_UNORM = 77, + VK_FORMAT_R16G16_SNORM = 78, + VK_FORMAT_R16G16_USCALED = 79, + VK_FORMAT_R16G16_SSCALED = 80, + VK_FORMAT_R16G16_UINT = 81, + VK_FORMAT_R16G16_SINT = 82, + VK_FORMAT_R16G16_SFLOAT = 83, + VK_FORMAT_R16G16B16_UNORM = 84, + VK_FORMAT_R16G16B16_SNORM = 85, + VK_FORMAT_R16G16B16_USCALED = 86, + VK_FORMAT_R16G16B16_SSCALED = 87, + VK_FORMAT_R16G16B16_UINT = 88, + VK_FORMAT_R16G16B16_SINT = 89, + VK_FORMAT_R16G16B16_SFLOAT = 90, + VK_FORMAT_R16G16B16A16_UNORM = 91, + VK_FORMAT_R16G16B16A16_SNORM = 92, + VK_FORMAT_R16G16B16A16_USCALED = 93, + VK_FORMAT_R16G16B16A16_SSCALED = 94, + VK_FORMAT_R16G16B16A16_UINT = 95, + VK_FORMAT_R16G16B16A16_SINT = 96, + VK_FORMAT_R16G16B16A16_SFLOAT = 97, + VK_FORMAT_R32_UINT = 98, + VK_FORMAT_R32_SINT = 99, + VK_FORMAT_R32_SFLOAT = 100, + VK_FORMAT_R32G32_UINT = 101, + VK_FORMAT_R32G32_SINT = 102, + VK_FORMAT_R32G32_SFLOAT = 103, + VK_FORMAT_R32G32B32_UINT = 104, + VK_FORMAT_R32G32B32_SINT = 105, + VK_FORMAT_R32G32B32_SFLOAT = 106, + 
VK_FORMAT_R32G32B32A32_UINT = 107, + VK_FORMAT_R32G32B32A32_SINT = 108, + VK_FORMAT_R32G32B32A32_SFLOAT = 109, + VK_FORMAT_R64_UINT = 110, + VK_FORMAT_R64_SINT = 111, + VK_FORMAT_R64_SFLOAT = 112, + VK_FORMAT_R64G64_UINT = 113, + VK_FORMAT_R64G64_SINT = 114, + VK_FORMAT_R64G64_SFLOAT = 115, + VK_FORMAT_R64G64B64_UINT = 116, + VK_FORMAT_R64G64B64_SINT = 117, + VK_FORMAT_R64G64B64_SFLOAT = 118, + VK_FORMAT_R64G64B64A64_UINT = 119, + VK_FORMAT_R64G64B64A64_SINT = 120, + VK_FORMAT_R64G64B64A64_SFLOAT = 121, + VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, + VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, + VK_FORMAT_D16_UNORM = 124, + VK_FORMAT_X8_D24_UNORM_PACK32 = 125, + VK_FORMAT_D32_SFLOAT = 126, + VK_FORMAT_S8_UINT = 127, + VK_FORMAT_D16_UNORM_S8_UINT = 128, + VK_FORMAT_D24_UNORM_S8_UINT = 129, + VK_FORMAT_D32_SFLOAT_S8_UINT = 130, + VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, + VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, + VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, + VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, + VK_FORMAT_BC2_UNORM_BLOCK = 135, + VK_FORMAT_BC2_SRGB_BLOCK = 136, + VK_FORMAT_BC3_UNORM_BLOCK = 137, + VK_FORMAT_BC3_SRGB_BLOCK = 138, + VK_FORMAT_BC4_UNORM_BLOCK = 139, + VK_FORMAT_BC4_SNORM_BLOCK = 140, + VK_FORMAT_BC5_UNORM_BLOCK = 141, + VK_FORMAT_BC5_SNORM_BLOCK = 142, + VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, + VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, + VK_FORMAT_BC7_UNORM_BLOCK = 145, + VK_FORMAT_BC7_SRGB_BLOCK = 146, + VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, + VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, + VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, + VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, + VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, + VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, + VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, + VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, + VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, + VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, + VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, + VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, + VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, + VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, + VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, + VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, + VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, + VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, + VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, + VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, + VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, + VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, + VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, + VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, + VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, + VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, + VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, + VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, + VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, + VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, + VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, + VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, + VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, + VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, + VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, + VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, + VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, + VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, + VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000, + VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006, + VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010, + 
VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016, + VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026, + VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027, + VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003, + VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000, + VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013, + VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, + VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, + VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, + VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, + VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, + VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, + VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, + VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, + VK_FORMAT_R16G16_SFIXED5_NV = 1000464000, + VK_FORMAT_A1B5G5R5_UNORM_PACK16_KHR = 1000470000, + VK_FORMAT_A8_UNORM_KHR = 1000470001, + VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK, + VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = 
VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK, + VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK, + VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK, + VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM, + VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM, + VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM, + VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16, + VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16, + VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16, + VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16, + VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16, + VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16, + VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16, + VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16, + VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM, + VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM, + VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = 
VK_FORMAT_G16_B16R16_2PLANE_422_UNORM, + VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM, + VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM, + VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16, + VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM, + VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16, + VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16, + // VK_FORMAT_R16G16_S10_5_NV is a deprecated alias + VK_FORMAT_R16G16_S10_5_NV = VK_FORMAT_R16G16_SFIXED5_NV, + VK_FORMAT_MAX_ENUM = 0x7FFFFFFF +} VkFormat; + +typedef enum VkImageTiling { + VK_IMAGE_TILING_OPTIMAL = 0, + VK_IMAGE_TILING_LINEAR = 1, + VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000, + VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF +} VkImageTiling; + +typedef enum VkImageType { + VK_IMAGE_TYPE_1D = 0, + VK_IMAGE_TYPE_2D = 1, + VK_IMAGE_TYPE_3D = 2, + VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageType; + +typedef enum VkPhysicalDeviceType { + VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, + VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, + VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, + VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, + VK_PHYSICAL_DEVICE_TYPE_CPU = 4, + VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkPhysicalDeviceType; + +typedef enum VkQueryType { + VK_QUERY_TYPE_OCCLUSION = 0, + VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, + VK_QUERY_TYPE_TIMESTAMP = 2, + VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000, + VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004, + VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000, + VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000, + VK_QUERY_TYPE_VIDEO_ENCODE_FEEDBACK_KHR = 1000299000, + VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000, + VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000, + VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001, + VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000, + VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001, + VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkQueryType; + +typedef enum VkSharingMode { + VK_SHARING_MODE_EXCLUSIVE = 0, + VK_SHARING_MODE_CONCURRENT = 1, + VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSharingMode; + +typedef enum VkComponentSwizzle { + VK_COMPONENT_SWIZZLE_IDENTITY = 0, + VK_COMPONENT_SWIZZLE_ZERO = 1, + VK_COMPONENT_SWIZZLE_ONE = 2, + VK_COMPONENT_SWIZZLE_R = 3, + VK_COMPONENT_SWIZZLE_G = 4, + VK_COMPONENT_SWIZZLE_B = 5, + VK_COMPONENT_SWIZZLE_A = 6, + VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF +} VkComponentSwizzle; + +typedef enum VkImageViewType { + VK_IMAGE_VIEW_TYPE_1D = 0, + VK_IMAGE_VIEW_TYPE_2D = 1, + VK_IMAGE_VIEW_TYPE_3D = 2, + VK_IMAGE_VIEW_TYPE_CUBE = 3, + VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, + VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, + VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, + VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkImageViewType; + +typedef enum VkBlendFactor { + VK_BLEND_FACTOR_ZERO = 0, + VK_BLEND_FACTOR_ONE = 1, + VK_BLEND_FACTOR_SRC_COLOR = 2, + VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, + 
VK_BLEND_FACTOR_DST_COLOR = 4, + VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, + VK_BLEND_FACTOR_SRC_ALPHA = 6, + VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, + VK_BLEND_FACTOR_DST_ALPHA = 8, + VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, + VK_BLEND_FACTOR_CONSTANT_COLOR = 10, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, + VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, + VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, + VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, + VK_BLEND_FACTOR_SRC1_COLOR = 15, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, + VK_BLEND_FACTOR_SRC1_ALPHA = 17, + VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, + VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF +} VkBlendFactor; + +typedef enum VkBlendOp { + VK_BLEND_OP_ADD = 0, + VK_BLEND_OP_SUBTRACT = 1, + VK_BLEND_OP_REVERSE_SUBTRACT = 2, + VK_BLEND_OP_MIN = 3, + VK_BLEND_OP_MAX = 4, + VK_BLEND_OP_ZERO_EXT = 1000148000, + VK_BLEND_OP_SRC_EXT = 1000148001, + VK_BLEND_OP_DST_EXT = 1000148002, + VK_BLEND_OP_SRC_OVER_EXT = 1000148003, + VK_BLEND_OP_DST_OVER_EXT = 1000148004, + VK_BLEND_OP_SRC_IN_EXT = 1000148005, + VK_BLEND_OP_DST_IN_EXT = 1000148006, + VK_BLEND_OP_SRC_OUT_EXT = 1000148007, + VK_BLEND_OP_DST_OUT_EXT = 1000148008, + VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, + VK_BLEND_OP_DST_ATOP_EXT = 1000148010, + VK_BLEND_OP_XOR_EXT = 1000148011, + VK_BLEND_OP_MULTIPLY_EXT = 1000148012, + VK_BLEND_OP_SCREEN_EXT = 1000148013, + VK_BLEND_OP_OVERLAY_EXT = 1000148014, + VK_BLEND_OP_DARKEN_EXT = 1000148015, + VK_BLEND_OP_LIGHTEN_EXT = 1000148016, + VK_BLEND_OP_COLORDODGE_EXT = 1000148017, + VK_BLEND_OP_COLORBURN_EXT = 1000148018, + VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, + VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, + VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, + VK_BLEND_OP_EXCLUSION_EXT = 1000148022, + VK_BLEND_OP_INVERT_EXT = 1000148023, + VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, + VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, + VK_BLEND_OP_LINEARBURN_EXT = 1000148026, + VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, + VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, + VK_BLEND_OP_PINLIGHT_EXT = 1000148029, + VK_BLEND_OP_HARDMIX_EXT = 1000148030, + VK_BLEND_OP_HSL_HUE_EXT = 1000148031, + VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, + VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, + VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, + VK_BLEND_OP_PLUS_EXT = 1000148035, + VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, + VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, + VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, + VK_BLEND_OP_MINUS_EXT = 1000148039, + VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, + VK_BLEND_OP_CONTRAST_EXT = 1000148041, + VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, + VK_BLEND_OP_RED_EXT = 1000148043, + VK_BLEND_OP_GREEN_EXT = 1000148044, + VK_BLEND_OP_BLUE_EXT = 1000148045, + VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF +} VkBlendOp; + +typedef enum VkCompareOp { + VK_COMPARE_OP_NEVER = 0, + VK_COMPARE_OP_LESS = 1, + VK_COMPARE_OP_EQUAL = 2, + VK_COMPARE_OP_LESS_OR_EQUAL = 3, + VK_COMPARE_OP_GREATER = 4, + VK_COMPARE_OP_NOT_EQUAL = 5, + VK_COMPARE_OP_GREATER_OR_EQUAL = 6, + VK_COMPARE_OP_ALWAYS = 7, + VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF +} VkCompareOp; + +typedef enum VkDynamicState { + VK_DYNAMIC_STATE_VIEWPORT = 0, + VK_DYNAMIC_STATE_SCISSOR = 1, + VK_DYNAMIC_STATE_LINE_WIDTH = 2, + VK_DYNAMIC_STATE_DEPTH_BIAS = 3, + VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, + VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, + VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, + VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, + VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, + VK_DYNAMIC_STATE_CULL_MODE = 1000267000, + VK_DYNAMIC_STATE_FRONT_FACE = 1000267001, + 
VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010, + VK_DYNAMIC_STATE_STENCIL_OP = 1000267011, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_ENABLE_EXT = 1000099001, + VK_DYNAMIC_STATE_DISCARD_RECTANGLE_MODE_EXT = 1000099002, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, + VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000, + VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004, + VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_ENABLE_NV = 1000205000, + VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001, + VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000, + VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000, + VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000, + VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003, + VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000, + VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003, + VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004, + VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005, + VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006, + VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007, + VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008, + VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009, + VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010, + VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011, + VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012, + VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002, + VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013, + VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014, + VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015, + VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016, + VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017, + VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018, + VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019, + VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020, + VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021, + VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022, + VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023, + VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025, + VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028, + VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029, + VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030, + VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031, + VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032, + VK_DYNAMIC_STATE_ATTACHMENT_FEEDBACK_LOOP_ENABLE_EXT = 1000524000, + VK_DYNAMIC_STATE_LINE_STIPPLE_KHR = 1000259000, + 
VK_DYNAMIC_STATE_DEPTH_CLAMP_RANGE_EXT = 1000582000, + VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = VK_DYNAMIC_STATE_LINE_STIPPLE_KHR, + VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE, + VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE, + VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY, + VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT, + VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT, + VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE, + VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE, + VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE, + VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP, + VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE, + VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP, + VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE, + VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE, + VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE, + VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF +} VkDynamicState; + +typedef enum VkFrontFace { + VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, + VK_FRONT_FACE_CLOCKWISE = 1, + VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF +} VkFrontFace; + +typedef enum VkVertexInputRate { + VK_VERTEX_INPUT_RATE_VERTEX = 0, + VK_VERTEX_INPUT_RATE_INSTANCE = 1, + VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF +} VkVertexInputRate; + +typedef enum VkPrimitiveTopology { + VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, + VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, + VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, + VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, + VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, + VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF +} VkPrimitiveTopology; + +typedef enum VkPolygonMode { + VK_POLYGON_MODE_FILL = 0, + VK_POLYGON_MODE_LINE = 1, + VK_POLYGON_MODE_POINT = 2, + VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, + VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF +} VkPolygonMode; + +typedef enum VkStencilOp { + VK_STENCIL_OP_KEEP = 0, + VK_STENCIL_OP_ZERO = 1, + VK_STENCIL_OP_REPLACE = 2, + VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, + VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, + VK_STENCIL_OP_INVERT = 5, + VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, + VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, + VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF +} VkStencilOp; + +typedef enum VkLogicOp { + VK_LOGIC_OP_CLEAR = 0, + VK_LOGIC_OP_AND = 1, + VK_LOGIC_OP_AND_REVERSE = 2, + VK_LOGIC_OP_COPY = 3, + VK_LOGIC_OP_AND_INVERTED = 4, + VK_LOGIC_OP_NO_OP = 5, + VK_LOGIC_OP_XOR = 6, + VK_LOGIC_OP_OR = 7, + VK_LOGIC_OP_NOR = 8, + VK_LOGIC_OP_EQUIVALENT = 9, + VK_LOGIC_OP_INVERT = 10, + VK_LOGIC_OP_OR_REVERSE = 11, + VK_LOGIC_OP_COPY_INVERTED = 12, + VK_LOGIC_OP_OR_INVERTED = 13, + VK_LOGIC_OP_NAND = 14, + VK_LOGIC_OP_SET = 15, + VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF +} VkLogicOp; + +typedef enum VkBorderColor { + VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, + 
VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, + VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, + VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, + VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, + VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, + VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003, + VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004, + VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF +} VkBorderColor; + +typedef enum VkFilter { + VK_FILTER_NEAREST = 0, + VK_FILTER_LINEAR = 1, + VK_FILTER_CUBIC_EXT = 1000015000, + VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT, + VK_FILTER_MAX_ENUM = 0x7FFFFFFF +} VkFilter; + +typedef enum VkSamplerAddressMode { + VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, + VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, + VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, + // VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR is a deprecated alias + VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE, + VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerAddressMode; + +typedef enum VkSamplerMipmapMode { + VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, + VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, + VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerMipmapMode; + +typedef enum VkDescriptorType { + VK_DESCRIPTOR_TYPE_SAMPLER = 0, + VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, + VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, + VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, + VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, + VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, + VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, + VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, + VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000, + VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000, + VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000, + VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001, + VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000, + VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK, + VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT, + VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorType; + +typedef enum VkAttachmentLoadOp { + VK_ATTACHMENT_LOAD_OP_LOAD = 0, + VK_ATTACHMENT_LOAD_OP_CLEAR = 1, + VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, + VK_ATTACHMENT_LOAD_OP_NONE_KHR = 1000400000, + VK_ATTACHMENT_LOAD_OP_NONE_EXT = VK_ATTACHMENT_LOAD_OP_NONE_KHR, + VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentLoadOp; + +typedef enum VkAttachmentStoreOp { + VK_ATTACHMENT_STORE_OP_STORE = 0, + VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, + VK_ATTACHMENT_STORE_OP_NONE = 1000301000, + VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE, + VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentStoreOp; + +typedef enum VkPipelineBindPoint { + VK_PIPELINE_BIND_POINT_GRAPHICS = 0, + VK_PIPELINE_BIND_POINT_COMPUTE = 1, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_BIND_POINT_EXECUTION_GRAPH_AMDX = 1000134000, +#endif + VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000, + VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003, + VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR, + VK_PIPELINE_BIND_POINT_MAX_ENUM = 
0x7FFFFFFF +} VkPipelineBindPoint; + +typedef enum VkCommandBufferLevel { + VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, + VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, + VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferLevel; + +typedef enum VkIndexType { + VK_INDEX_TYPE_UINT16 = 0, + VK_INDEX_TYPE_UINT32 = 1, + VK_INDEX_TYPE_NONE_KHR = 1000165000, + VK_INDEX_TYPE_UINT8_KHR = 1000265000, + VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR, + VK_INDEX_TYPE_UINT8_EXT = VK_INDEX_TYPE_UINT8_KHR, + VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkIndexType; + +typedef enum VkSubpassContents { + VK_SUBPASS_CONTENTS_INLINE = 0, + VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR = 1000451000, + VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_EXT = VK_SUBPASS_CONTENTS_INLINE_AND_SECONDARY_COMMAND_BUFFERS_KHR, + VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassContents; + +typedef enum VkAccessFlagBits { + VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, + VK_ACCESS_INDEX_READ_BIT = 0x00000002, + VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, + VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, + VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, + VK_ACCESS_SHADER_READ_BIT = 0x00000020, + VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, + VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, + VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, + VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, + VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, + VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, + VK_ACCESS_HOST_READ_BIT = 0x00002000, + VK_ACCESS_HOST_WRITE_BIT = 0x00004000, + VK_ACCESS_MEMORY_READ_BIT = 0x00008000, + VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, + VK_ACCESS_NONE = 0, + VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000, + VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000, + VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000, + VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000, + VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000, + VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000, + VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR, + VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR, + VK_ACCESS_NONE_KHR = VK_ACCESS_NONE, + VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV, + VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_EXT = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV, + VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAccessFlagBits; +typedef VkFlags VkAccessFlags; + +typedef enum VkImageAspectFlagBits { + VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, + VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, + VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, + VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, + VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010, + VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020, + VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040, + VK_IMAGE_ASPECT_NONE = 0, + VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080, + 
VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100, + VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200, + VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400, + VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT, + VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT, + VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT, + VK_IMAGE_ASPECT_NONE_KHR = VK_IMAGE_ASPECT_NONE, + VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageAspectFlagBits; +typedef VkFlags VkImageAspectFlags; + +typedef enum VkFormatFeatureFlagBits { + VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, + VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, + VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, + VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, + VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, + VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, + VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, + VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, + VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, + VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000, + VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000, + VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000, + VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000, + VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000, + VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000, + VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000, + VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000, + VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT, + VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT, + VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT, + VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT, + VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT, + 
VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT, + VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT, + VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT, + VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFormatFeatureFlagBits; +typedef VkFlags VkFormatFeatureFlags; + +typedef enum VkImageCreateFlagBits { + VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, + VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, + VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100, + VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800, + VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200, + VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000, + VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, + VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000, + VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000, + VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000, + VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000, + VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000, + VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00100000, + VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT, + VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, + VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT, + VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT, + VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT, + VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT, + VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageCreateFlagBits; +typedef VkFlags VkImageCreateFlags; + +typedef enum VkSampleCountFlagBits { + VK_SAMPLE_COUNT_1_BIT = 0x00000001, + VK_SAMPLE_COUNT_2_BIT = 0x00000002, + VK_SAMPLE_COUNT_4_BIT = 0x00000004, + VK_SAMPLE_COUNT_8_BIT = 0x00000008, + VK_SAMPLE_COUNT_16_BIT = 0x00000010, + VK_SAMPLE_COUNT_32_BIT = 0x00000020, + VK_SAMPLE_COUNT_64_BIT = 0x00000040, + VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSampleCountFlagBits; +typedef VkFlags VkSampleCountFlags; + +typedef enum VkImageUsageFlagBits { + VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, + VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, + VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, + VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, + VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, + VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, + VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400, + VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800, + VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000, + VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200, + VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100, + VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT = 0x00400000, + VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 
0x00002000, + VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000, + VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000, + VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000, + VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000, + VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000, + VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000, + VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageUsageFlagBits; +typedef VkFlags VkImageUsageFlags; + +typedef enum VkInstanceCreateFlagBits { + VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001, + VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkInstanceCreateFlagBits; +typedef VkFlags VkInstanceCreateFlags; + +typedef enum VkMemoryHeapFlagBits { + VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002, + VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT, + VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryHeapFlagBits; +typedef VkFlags VkMemoryHeapFlags; + +typedef enum VkMemoryPropertyFlagBits { + VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, + VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, + VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, + VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, + VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, + VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020, + VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040, + VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080, + VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100, + VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryPropertyFlagBits; +typedef VkFlags VkMemoryPropertyFlags; + +typedef enum VkQueueFlagBits { + VK_QUEUE_GRAPHICS_BIT = 0x00000001, + VK_QUEUE_COMPUTE_BIT = 0x00000002, + VK_QUEUE_TRANSFER_BIT = 0x00000004, + VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, + VK_QUEUE_PROTECTED_BIT = 0x00000010, + VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020, + VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040, + VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100, + VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueueFlagBits; +typedef VkFlags VkQueueFlags; +typedef VkFlags VkDeviceCreateFlags; + +typedef enum VkDeviceQueueCreateFlagBits { + VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001, + VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDeviceQueueCreateFlagBits; +typedef VkFlags VkDeviceQueueCreateFlags; + +typedef enum VkPipelineStageFlagBits { + VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, + VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, + VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, + VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, + VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, + VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, + VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, + VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, + VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, + VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, + VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, + VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, + VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, + VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, + VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, + VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, + VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, + VK_PIPELINE_STAGE_NONE = 0, + VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 
0x01000000, + VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000, + VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000, + VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000, + VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR, + VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR, + VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT, + VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE, + VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_EXT = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV, + VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineStageFlagBits; +typedef VkFlags VkPipelineStageFlags; + +typedef enum VkMemoryMapFlagBits { + VK_MEMORY_MAP_PLACED_BIT_EXT = 0x00000001, + VK_MEMORY_MAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryMapFlagBits; +typedef VkFlags VkMemoryMapFlags; + +typedef enum VkSparseMemoryBindFlagBits { + VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, + VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseMemoryBindFlagBits; +typedef VkFlags VkSparseMemoryBindFlags; + +typedef enum VkSparseImageFormatFlagBits { + VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, + VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, + VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, + VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSparseImageFormatFlagBits; +typedef VkFlags VkSparseImageFormatFlags; + +typedef enum VkFenceCreateFlagBits { + VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, + VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceCreateFlagBits; +typedef VkFlags VkFenceCreateFlags; +typedef VkFlags VkSemaphoreCreateFlags; + +typedef enum VkEventCreateFlagBits { + VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001, + VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT, + VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkEventCreateFlagBits; +typedef VkFlags VkEventCreateFlags; + +typedef enum VkQueryPipelineStatisticFlagBits { + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, + VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, + VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, + VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020, + VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, + VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, + VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, + VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, + VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800, + 
VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000, + VK_QUERY_PIPELINE_STATISTIC_CLUSTER_CULLING_SHADER_INVOCATIONS_BIT_HUAWEI = 0x00002000, + VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryPipelineStatisticFlagBits; +typedef VkFlags VkQueryPipelineStatisticFlags; +typedef VkFlags VkQueryPoolCreateFlags; + +typedef enum VkQueryResultFlagBits { + VK_QUERY_RESULT_64_BIT = 0x00000001, + VK_QUERY_RESULT_WAIT_BIT = 0x00000002, + VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, + VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, + VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010, + VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryResultFlagBits; +typedef VkFlags VkQueryResultFlags; + +typedef enum VkBufferCreateFlagBits { + VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, + VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, + VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, + VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010, + VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020, + VK_BUFFER_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR = 0x00000040, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferCreateFlagBits; +typedef VkFlags VkBufferCreateFlags; + +typedef enum VkBufferUsageFlagBits { + VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, + VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, + VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, + VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, + VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, + VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, + VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, + VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, + VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000, + VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000, + VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800, + VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000, + VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUFFER_USAGE_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000, +#endif + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000, + VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000, + VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400, + VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000, + VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000, + VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000, + VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000, + VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000, + VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000, + VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000, + VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT, + VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkBufferUsageFlagBits; +typedef VkFlags 
VkBufferUsageFlags; +typedef VkFlags VkBufferViewCreateFlags; + +typedef enum VkImageViewCreateFlagBits { + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001, + VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004, + VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002, + VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkImageViewCreateFlagBits; +typedef VkFlags VkImageViewCreateFlags; +typedef VkFlags VkShaderModuleCreateFlags; + +typedef enum VkPipelineCacheCreateFlagBits { + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001, + VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT, + VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCacheCreateFlagBits; +typedef VkFlags VkPipelineCacheCreateFlags; + +typedef enum VkColorComponentFlagBits { + VK_COLOR_COMPONENT_R_BIT = 0x00000001, + VK_COLOR_COMPONENT_G_BIT = 0x00000002, + VK_COLOR_COMPONENT_B_BIT = 0x00000004, + VK_COLOR_COMPONENT_A_BIT = 0x00000008, + VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkColorComponentFlagBits; +typedef VkFlags VkColorComponentFlags; + +typedef enum VkPipelineCreateFlagBits { + VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, + VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, + VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008, + VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000, + VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000, + VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000, + VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000, + VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000, + VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020, + VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040, + VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080, + VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000, + VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800, + VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000, + VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000, + VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400, + VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000, + VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000, + VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000, + VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_PIPELINE_CREATE_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000, +#endif + VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000, + VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000, + VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT, + // 
VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR, + // VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT is a deprecated alias + VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT, + VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT, + VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE, + VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT, + VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT, + VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreateFlagBits; +typedef VkFlags VkPipelineCreateFlags; + +typedef enum VkPipelineShaderStageCreateFlagBits { + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002, + VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT, + VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineShaderStageCreateFlagBits; +typedef VkFlags VkPipelineShaderStageCreateFlags; + +typedef enum VkShaderStageFlagBits { + VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, + VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, + VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, + VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, + VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, + VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, + VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, + VK_SHADER_STAGE_ALL = 0x7FFFFFFF, + VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100, + VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400, + VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800, + VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000, + VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000, + VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040, + VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080, + VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000, + VK_SHADER_STAGE_CLUSTER_CULLING_BIT_HUAWEI = 0x00080000, + VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR, + VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR, + VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR, + VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR, + VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR, + VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR, + VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT, + VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT, + VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkShaderStageFlagBits; + +typedef enum VkCullModeFlagBits { + VK_CULL_MODE_NONE = 0, + VK_CULL_MODE_FRONT_BIT = 0x00000001, + VK_CULL_MODE_BACK_BIT = 0x00000002, + VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, + VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCullModeFlagBits; +typedef VkFlags VkCullModeFlags; +typedef VkFlags 
VkPipelineVertexInputStateCreateFlags; +typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; +typedef VkFlags VkPipelineTessellationStateCreateFlags; +typedef VkFlags VkPipelineViewportStateCreateFlags; +typedef VkFlags VkPipelineRasterizationStateCreateFlags; +typedef VkFlags VkPipelineMultisampleStateCreateFlags; + +typedef enum VkPipelineDepthStencilStateCreateFlagBits { + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000001, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineDepthStencilStateCreateFlagBits; +typedef VkFlags VkPipelineDepthStencilStateCreateFlags; + +typedef enum VkPipelineColorBlendStateCreateFlagBits { + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT, + VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineColorBlendStateCreateFlagBits; +typedef VkFlags VkPipelineColorBlendStateCreateFlags; +typedef VkFlags VkPipelineDynamicStateCreateFlags; + +typedef enum VkPipelineLayoutCreateFlagBits { + VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002, + VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineLayoutCreateFlagBits; +typedef VkFlags VkPipelineLayoutCreateFlags; +typedef VkFlags VkShaderStageFlags; + +typedef enum VkSamplerCreateFlagBits { + VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001, + VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002, + VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004, + VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010, + VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSamplerCreateFlagBits; +typedef VkFlags VkSamplerCreateFlags; + +typedef enum VkDescriptorPoolCreateFlagBits { + VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_SETS_BIT_NV = 0x00000008, + VK_DESCRIPTOR_POOL_CREATE_ALLOW_OVERALLOCATION_POOLS_BIT_NV = 0x00000010, + VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT, + VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorPoolCreateFlagBits; +typedef VkFlags VkDescriptorPoolCreateFlags; +typedef VkFlags VkDescriptorPoolResetFlags; + +typedef enum VkDescriptorSetLayoutCreateFlagBits { + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010, + 
VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00000080, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_PER_STAGE_BIT_NV = 0x00000040, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT, + VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorSetLayoutCreateFlagBits; +typedef VkFlags VkDescriptorSetLayoutCreateFlags; + +typedef enum VkAttachmentDescriptionFlagBits { + VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, + VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkAttachmentDescriptionFlagBits; +typedef VkFlags VkAttachmentDescriptionFlags; + +typedef enum VkDependencyFlagBits { + VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, + VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004, + VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002, + VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008, + VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT, + VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT, + VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDependencyFlagBits; +typedef VkFlags VkDependencyFlags; + +typedef enum VkFramebufferCreateFlagBits { + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001, + VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT, + VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFramebufferCreateFlagBits; +typedef VkFlags VkFramebufferCreateFlags; + +typedef enum VkRenderPassCreateFlagBits { + VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002, + VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderPassCreateFlagBits; +typedef VkFlags VkRenderPassCreateFlags; + +typedef enum VkSubpassDescriptionFlagBits { + VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, + VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, + VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004, + VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040, + VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT, + VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubpassDescriptionFlagBits; +typedef VkFlags VkSubpassDescriptionFlags; + +typedef enum VkCommandPoolCreateFlagBits { + VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, + VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, + VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004, + VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolCreateFlagBits; +typedef VkFlags 
VkCommandPoolCreateFlags; + +typedef enum VkCommandPoolResetFlagBits { + VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandPoolResetFlagBits; +typedef VkFlags VkCommandPoolResetFlags; + +typedef enum VkCommandBufferUsageFlagBits { + VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, + VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, + VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, + VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferUsageFlagBits; +typedef VkFlags VkCommandBufferUsageFlags; + +typedef enum VkQueryControlFlagBits { + VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, + VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkQueryControlFlagBits; +typedef VkFlags VkQueryControlFlags; + +typedef enum VkCommandBufferResetFlagBits { + VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, + VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkCommandBufferResetFlagBits; +typedef VkFlags VkCommandBufferResetFlags; + +typedef enum VkStencilFaceFlagBits { + VK_STENCIL_FACE_FRONT_BIT = 0x00000001, + VK_STENCIL_FACE_BACK_BIT = 0x00000002, + VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003, + // VK_STENCIL_FRONT_AND_BACK is a deprecated alias + VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK, + VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkStencilFaceFlagBits; +typedef VkFlags VkStencilFaceFlags; +typedef struct VkExtent2D { + uint32_t width; + uint32_t height; +} VkExtent2D; + +typedef struct VkExtent3D { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkExtent3D; + +typedef struct VkOffset2D { + int32_t x; + int32_t y; +} VkOffset2D; + +typedef struct VkOffset3D { + int32_t x; + int32_t y; + int32_t z; +} VkOffset3D; + +typedef struct VkRect2D { + VkOffset2D offset; + VkExtent2D extent; +} VkRect2D; + +typedef struct VkBaseInStructure { + VkStructureType sType; + const struct VkBaseInStructure* pNext; +} VkBaseInStructure; + +typedef struct VkBaseOutStructure { + VkStructureType sType; + struct VkBaseOutStructure* pNext; +} VkBaseOutStructure; + +typedef struct VkBufferMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; +} VkBufferMemoryBarrier; + +typedef struct VkDispatchIndirectCommand { + uint32_t x; + uint32_t y; + uint32_t z; +} VkDispatchIndirectCommand; + +typedef struct VkDrawIndexedIndirectCommand { + uint32_t indexCount; + uint32_t instanceCount; + uint32_t firstIndex; + int32_t vertexOffset; + uint32_t firstInstance; +} VkDrawIndexedIndirectCommand; + +typedef struct VkDrawIndirectCommand { + uint32_t vertexCount; + uint32_t instanceCount; + uint32_t firstVertex; + uint32_t firstInstance; +} VkDrawIndirectCommand; + +typedef struct VkImageSubresourceRange { + VkImageAspectFlags aspectMask; + uint32_t baseMipLevel; + uint32_t levelCount; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceRange; + +typedef struct VkImageMemoryBarrier { + VkStructureType sType; + const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier; + +typedef struct VkMemoryBarrier { + VkStructureType sType; 
+ const void* pNext; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; +} VkMemoryBarrier; + +typedef struct VkPipelineCacheHeaderVersionOne { + uint32_t headerSize; + VkPipelineCacheHeaderVersion headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; +} VkPipelineCacheHeaderVersionOne; + +typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)( + void* pUserData, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkFreeFunction)( + void* pUserData, + void* pMemory); + +typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)( + void* pUserData, + size_t size, + VkInternalAllocationType allocationType, + VkSystemAllocationScope allocationScope); + +typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)( + void* pUserData, + void* pOriginal, + size_t size, + size_t alignment, + VkSystemAllocationScope allocationScope); + +typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void); +typedef struct VkAllocationCallbacks { + void* pUserData; + PFN_vkAllocationFunction pfnAllocation; + PFN_vkReallocationFunction pfnReallocation; + PFN_vkFreeFunction pfnFree; + PFN_vkInternalAllocationNotification pfnInternalAllocation; + PFN_vkInternalFreeNotification pfnInternalFree; +} VkAllocationCallbacks; + +typedef struct VkApplicationInfo { + VkStructureType sType; + const void* pNext; + const char* pApplicationName; + uint32_t applicationVersion; + const char* pEngineName; + uint32_t engineVersion; + uint32_t apiVersion; +} VkApplicationInfo; + +typedef struct VkFormatProperties { + VkFormatFeatureFlags linearTilingFeatures; + VkFormatFeatureFlags optimalTilingFeatures; + VkFormatFeatureFlags bufferFeatures; +} VkFormatProperties; + +typedef struct VkImageFormatProperties { + VkExtent3D maxExtent; + uint32_t maxMipLevels; + uint32_t maxArrayLayers; + VkSampleCountFlags sampleCounts; + VkDeviceSize maxResourceSize; +} VkImageFormatProperties; + +typedef struct VkInstanceCreateInfo { + VkStructureType sType; + const void* pNext; + VkInstanceCreateFlags flags; + const VkApplicationInfo* pApplicationInfo; + uint32_t enabledLayerCount; + const char* const* ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; +} VkInstanceCreateInfo; + +typedef struct VkMemoryHeap { + VkDeviceSize size; + VkMemoryHeapFlags flags; +} VkMemoryHeap; + +typedef struct VkMemoryType { + VkMemoryPropertyFlags propertyFlags; + uint32_t heapIndex; +} VkMemoryType; + +typedef struct VkPhysicalDeviceFeatures { + VkBool32 robustBufferAccess; + VkBool32 fullDrawIndexUint32; + VkBool32 imageCubeArray; + VkBool32 independentBlend; + VkBool32 geometryShader; + VkBool32 tessellationShader; + VkBool32 sampleRateShading; + VkBool32 dualSrcBlend; + VkBool32 logicOp; + VkBool32 multiDrawIndirect; + VkBool32 drawIndirectFirstInstance; + VkBool32 depthClamp; + VkBool32 depthBiasClamp; + VkBool32 fillModeNonSolid; + VkBool32 depthBounds; + VkBool32 wideLines; + VkBool32 largePoints; + VkBool32 alphaToOne; + VkBool32 multiViewport; + VkBool32 samplerAnisotropy; + VkBool32 textureCompressionETC2; + VkBool32 textureCompressionASTC_LDR; + VkBool32 textureCompressionBC; + VkBool32 occlusionQueryPrecise; + VkBool32 pipelineStatisticsQuery; + VkBool32 vertexPipelineStoresAndAtomics; + VkBool32 fragmentStoresAndAtomics; + 
VkBool32 shaderTessellationAndGeometryPointSize; + VkBool32 shaderImageGatherExtended; + VkBool32 shaderStorageImageExtendedFormats; + VkBool32 shaderStorageImageMultisample; + VkBool32 shaderStorageImageReadWithoutFormat; + VkBool32 shaderStorageImageWriteWithoutFormat; + VkBool32 shaderUniformBufferArrayDynamicIndexing; + VkBool32 shaderSampledImageArrayDynamicIndexing; + VkBool32 shaderStorageBufferArrayDynamicIndexing; + VkBool32 shaderStorageImageArrayDynamicIndexing; + VkBool32 shaderClipDistance; + VkBool32 shaderCullDistance; + VkBool32 shaderFloat64; + VkBool32 shaderInt64; + VkBool32 shaderInt16; + VkBool32 shaderResourceResidency; + VkBool32 shaderResourceMinLod; + VkBool32 sparseBinding; + VkBool32 sparseResidencyBuffer; + VkBool32 sparseResidencyImage2D; + VkBool32 sparseResidencyImage3D; + VkBool32 sparseResidency2Samples; + VkBool32 sparseResidency4Samples; + VkBool32 sparseResidency8Samples; + VkBool32 sparseResidency16Samples; + VkBool32 sparseResidencyAliased; + VkBool32 variableMultisampleRate; + VkBool32 inheritedQueries; +} VkPhysicalDeviceFeatures; + +typedef struct VkPhysicalDeviceLimits { + uint32_t maxImageDimension1D; + uint32_t maxImageDimension2D; + uint32_t maxImageDimension3D; + uint32_t maxImageDimensionCube; + uint32_t maxImageArrayLayers; + uint32_t maxTexelBufferElements; + uint32_t maxUniformBufferRange; + uint32_t maxStorageBufferRange; + uint32_t maxPushConstantsSize; + uint32_t maxMemoryAllocationCount; + uint32_t maxSamplerAllocationCount; + VkDeviceSize bufferImageGranularity; + VkDeviceSize sparseAddressSpaceSize; + uint32_t maxBoundDescriptorSets; + uint32_t maxPerStageDescriptorSamplers; + uint32_t maxPerStageDescriptorUniformBuffers; + uint32_t maxPerStageDescriptorStorageBuffers; + uint32_t maxPerStageDescriptorSampledImages; + uint32_t maxPerStageDescriptorStorageImages; + uint32_t maxPerStageDescriptorInputAttachments; + uint32_t maxPerStageResources; + uint32_t maxDescriptorSetSamplers; + uint32_t maxDescriptorSetUniformBuffers; + uint32_t maxDescriptorSetUniformBuffersDynamic; + uint32_t maxDescriptorSetStorageBuffers; + uint32_t maxDescriptorSetStorageBuffersDynamic; + uint32_t maxDescriptorSetSampledImages; + uint32_t maxDescriptorSetStorageImages; + uint32_t maxDescriptorSetInputAttachments; + uint32_t maxVertexInputAttributes; + uint32_t maxVertexInputBindings; + uint32_t maxVertexInputAttributeOffset; + uint32_t maxVertexInputBindingStride; + uint32_t maxVertexOutputComponents; + uint32_t maxTessellationGenerationLevel; + uint32_t maxTessellationPatchSize; + uint32_t maxTessellationControlPerVertexInputComponents; + uint32_t maxTessellationControlPerVertexOutputComponents; + uint32_t maxTessellationControlPerPatchOutputComponents; + uint32_t maxTessellationControlTotalOutputComponents; + uint32_t maxTessellationEvaluationInputComponents; + uint32_t maxTessellationEvaluationOutputComponents; + uint32_t maxGeometryShaderInvocations; + uint32_t maxGeometryInputComponents; + uint32_t maxGeometryOutputComponents; + uint32_t maxGeometryOutputVertices; + uint32_t maxGeometryTotalOutputComponents; + uint32_t maxFragmentInputComponents; + uint32_t maxFragmentOutputAttachments; + uint32_t maxFragmentDualSrcAttachments; + uint32_t maxFragmentCombinedOutputResources; + uint32_t maxComputeSharedMemorySize; + uint32_t maxComputeWorkGroupCount[3]; + uint32_t maxComputeWorkGroupInvocations; + uint32_t maxComputeWorkGroupSize[3]; + uint32_t subPixelPrecisionBits; + uint32_t subTexelPrecisionBits; + uint32_t mipmapPrecisionBits; + uint32_t 
maxDrawIndexedIndexValue; + uint32_t maxDrawIndirectCount; + float maxSamplerLodBias; + float maxSamplerAnisotropy; + uint32_t maxViewports; + uint32_t maxViewportDimensions[2]; + float viewportBoundsRange[2]; + uint32_t viewportSubPixelBits; + size_t minMemoryMapAlignment; + VkDeviceSize minTexelBufferOffsetAlignment; + VkDeviceSize minUniformBufferOffsetAlignment; + VkDeviceSize minStorageBufferOffsetAlignment; + int32_t minTexelOffset; + uint32_t maxTexelOffset; + int32_t minTexelGatherOffset; + uint32_t maxTexelGatherOffset; + float minInterpolationOffset; + float maxInterpolationOffset; + uint32_t subPixelInterpolationOffsetBits; + uint32_t maxFramebufferWidth; + uint32_t maxFramebufferHeight; + uint32_t maxFramebufferLayers; + VkSampleCountFlags framebufferColorSampleCounts; + VkSampleCountFlags framebufferDepthSampleCounts; + VkSampleCountFlags framebufferStencilSampleCounts; + VkSampleCountFlags framebufferNoAttachmentsSampleCounts; + uint32_t maxColorAttachments; + VkSampleCountFlags sampledImageColorSampleCounts; + VkSampleCountFlags sampledImageIntegerSampleCounts; + VkSampleCountFlags sampledImageDepthSampleCounts; + VkSampleCountFlags sampledImageStencilSampleCounts; + VkSampleCountFlags storageImageSampleCounts; + uint32_t maxSampleMaskWords; + VkBool32 timestampComputeAndGraphics; + float timestampPeriod; + uint32_t maxClipDistances; + uint32_t maxCullDistances; + uint32_t maxCombinedClipAndCullDistances; + uint32_t discreteQueuePriorities; + float pointSizeRange[2]; + float lineWidthRange[2]; + float pointSizeGranularity; + float lineWidthGranularity; + VkBool32 strictLines; + VkBool32 standardSampleLocations; + VkDeviceSize optimalBufferCopyOffsetAlignment; + VkDeviceSize optimalBufferCopyRowPitchAlignment; + VkDeviceSize nonCoherentAtomSize; +} VkPhysicalDeviceLimits; + +typedef struct VkPhysicalDeviceMemoryProperties { + uint32_t memoryTypeCount; + VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; + uint32_t memoryHeapCount; + VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryProperties; + +typedef struct VkPhysicalDeviceSparseProperties { + VkBool32 residencyStandard2DBlockShape; + VkBool32 residencyStandard2DMultisampleBlockShape; + VkBool32 residencyStandard3DBlockShape; + VkBool32 residencyAlignedMipSize; + VkBool32 residencyNonResidentStrict; +} VkPhysicalDeviceSparseProperties; + +typedef struct VkPhysicalDeviceProperties { + uint32_t apiVersion; + uint32_t driverVersion; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceType deviceType; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + VkPhysicalDeviceLimits limits; + VkPhysicalDeviceSparseProperties sparseProperties; +} VkPhysicalDeviceProperties; + +typedef struct VkQueueFamilyProperties { + VkQueueFlags queueFlags; + uint32_t queueCount; + uint32_t timestampValidBits; + VkExtent3D minImageTransferGranularity; +} VkQueueFamilyProperties; + +typedef struct VkDeviceQueueCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t queueFamilyIndex; + uint32_t queueCount; + const float* pQueuePriorities; +} VkDeviceQueueCreateInfo; + +typedef struct VkDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceCreateFlags flags; + uint32_t queueCreateInfoCount; + const VkDeviceQueueCreateInfo* pQueueCreateInfos; + // enabledLayerCount is deprecated and should not be used + uint32_t enabledLayerCount; + // ppEnabledLayerNames is deprecated and should not be used + const char* const* 
ppEnabledLayerNames; + uint32_t enabledExtensionCount; + const char* const* ppEnabledExtensionNames; + const VkPhysicalDeviceFeatures* pEnabledFeatures; +} VkDeviceCreateInfo; + +typedef struct VkExtensionProperties { + char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; +} VkExtensionProperties; + +typedef struct VkLayerProperties { + char layerName[VK_MAX_EXTENSION_NAME_SIZE]; + uint32_t specVersion; + uint32_t implementationVersion; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkLayerProperties; + +typedef struct VkSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + const VkPipelineStageFlags* pWaitDstStageMask; + uint32_t commandBufferCount; + const VkCommandBuffer* pCommandBuffers; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkSubmitInfo; + +typedef struct VkMappedMemoryRange { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMappedMemoryRange; + +typedef struct VkMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeIndex; +} VkMemoryAllocateInfo; + +typedef struct VkMemoryRequirements { + VkDeviceSize size; + VkDeviceSize alignment; + uint32_t memoryTypeBits; +} VkMemoryRequirements; + +typedef struct VkSparseMemoryBind { + VkDeviceSize resourceOffset; + VkDeviceSize size; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseMemoryBind; + +typedef struct VkSparseBufferMemoryBindInfo { + VkBuffer buffer; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseBufferMemoryBindInfo; + +typedef struct VkSparseImageOpaqueMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseMemoryBind* pBinds; +} VkSparseImageOpaqueMemoryBindInfo; + +typedef struct VkImageSubresource { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t arrayLayer; +} VkImageSubresource; + +typedef struct VkSparseImageMemoryBind { + VkImageSubresource subresource; + VkOffset3D offset; + VkExtent3D extent; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkSparseMemoryBindFlags flags; +} VkSparseImageMemoryBind; + +typedef struct VkSparseImageMemoryBindInfo { + VkImage image; + uint32_t bindCount; + const VkSparseImageMemoryBind* pBinds; +} VkSparseImageMemoryBindInfo; + +typedef struct VkBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t bufferBindCount; + const VkSparseBufferMemoryBindInfo* pBufferBinds; + uint32_t imageOpaqueBindCount; + const VkSparseImageOpaqueMemoryBindInfo* pImageOpaqueBinds; + uint32_t imageBindCount; + const VkSparseImageMemoryBindInfo* pImageBinds; + uint32_t signalSemaphoreCount; + const VkSemaphore* pSignalSemaphores; +} VkBindSparseInfo; + +typedef struct VkSparseImageFormatProperties { + VkImageAspectFlags aspectMask; + VkExtent3D imageGranularity; + VkSparseImageFormatFlags flags; +} VkSparseImageFormatProperties; + +typedef struct VkSparseImageMemoryRequirements { + VkSparseImageFormatProperties formatProperties; + uint32_t imageMipTailFirstLod; + VkDeviceSize imageMipTailSize; + VkDeviceSize imageMipTailOffset; + VkDeviceSize imageMipTailStride; +} VkSparseImageMemoryRequirements; + +typedef struct VkFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkFenceCreateFlags flags; +} VkFenceCreateInfo; + +typedef struct 
VkSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreCreateFlags flags; +} VkSemaphoreCreateInfo; + +typedef struct VkEventCreateInfo { + VkStructureType sType; + const void* pNext; + VkEventCreateFlags flags; +} VkEventCreateInfo; + +typedef struct VkQueryPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkQueryPoolCreateFlags flags; + VkQueryType queryType; + uint32_t queryCount; + VkQueryPipelineStatisticFlags pipelineStatistics; +} VkQueryPoolCreateInfo; + +typedef struct VkBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkDeviceSize size; + VkBufferUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkBufferCreateInfo; + +typedef struct VkBufferViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkBufferViewCreateFlags flags; + VkBuffer buffer; + VkFormat format; + VkDeviceSize offset; + VkDeviceSize range; +} VkBufferViewCreateInfo; + +typedef struct VkImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageType imageType; + VkFormat format; + VkExtent3D extent; + uint32_t mipLevels; + uint32_t arrayLayers; + VkSampleCountFlagBits samples; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkImageLayout initialLayout; +} VkImageCreateInfo; + +typedef struct VkSubresourceLayout { + VkDeviceSize offset; + VkDeviceSize size; + VkDeviceSize rowPitch; + VkDeviceSize arrayPitch; + VkDeviceSize depthPitch; +} VkSubresourceLayout; + +typedef struct VkComponentMapping { + VkComponentSwizzle r; + VkComponentSwizzle g; + VkComponentSwizzle b; + VkComponentSwizzle a; +} VkComponentMapping; + +typedef struct VkImageViewCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageViewCreateFlags flags; + VkImage image; + VkImageViewType viewType; + VkFormat format; + VkComponentMapping components; + VkImageSubresourceRange subresourceRange; +} VkImageViewCreateInfo; + +typedef struct VkShaderModuleCreateInfo { + VkStructureType sType; + const void* pNext; + VkShaderModuleCreateFlags flags; + size_t codeSize; + const uint32_t* pCode; +} VkShaderModuleCreateInfo; + +typedef struct VkPipelineCacheCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCacheCreateFlags flags; + size_t initialDataSize; + const void* pInitialData; +} VkPipelineCacheCreateInfo; + +typedef struct VkSpecializationMapEntry { + uint32_t constantID; + uint32_t offset; + size_t size; +} VkSpecializationMapEntry; + +typedef struct VkSpecializationInfo { + uint32_t mapEntryCount; + const VkSpecializationMapEntry* pMapEntries; + size_t dataSize; + const void* pData; +} VkSpecializationInfo; + +typedef struct VkPipelineShaderStageCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineShaderStageCreateFlags flags; + VkShaderStageFlagBits stage; + VkShaderModule module; + const char* pName; + const VkSpecializationInfo* pSpecializationInfo; +} VkPipelineShaderStageCreateInfo; + +typedef struct VkComputePipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + VkPipelineShaderStageCreateInfo stage; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkComputePipelineCreateInfo; + +typedef struct VkVertexInputBindingDescription { + uint32_t binding; + uint32_t stride; + VkVertexInputRate 
inputRate; +} VkVertexInputBindingDescription; + +typedef struct VkVertexInputAttributeDescription { + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription; + +typedef struct VkPipelineVertexInputStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineVertexInputStateCreateFlags flags; + uint32_t vertexBindingDescriptionCount; + const VkVertexInputBindingDescription* pVertexBindingDescriptions; + uint32_t vertexAttributeDescriptionCount; + const VkVertexInputAttributeDescription* pVertexAttributeDescriptions; +} VkPipelineVertexInputStateCreateInfo; + +typedef struct VkPipelineInputAssemblyStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineInputAssemblyStateCreateFlags flags; + VkPrimitiveTopology topology; + VkBool32 primitiveRestartEnable; +} VkPipelineInputAssemblyStateCreateInfo; + +typedef struct VkPipelineTessellationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineTessellationStateCreateFlags flags; + uint32_t patchControlPoints; +} VkPipelineTessellationStateCreateInfo; + +typedef struct VkViewport { + float x; + float y; + float width; + float height; + float minDepth; + float maxDepth; +} VkViewport; + +typedef struct VkPipelineViewportStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineViewportStateCreateFlags flags; + uint32_t viewportCount; + const VkViewport* pViewports; + uint32_t scissorCount; + const VkRect2D* pScissors; +} VkPipelineViewportStateCreateInfo; + +typedef struct VkPipelineRasterizationStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateCreateFlags flags; + VkBool32 depthClampEnable; + VkBool32 rasterizerDiscardEnable; + VkPolygonMode polygonMode; + VkCullModeFlags cullMode; + VkFrontFace frontFace; + VkBool32 depthBiasEnable; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; + float lineWidth; +} VkPipelineRasterizationStateCreateInfo; + +typedef struct VkPipelineMultisampleStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineMultisampleStateCreateFlags flags; + VkSampleCountFlagBits rasterizationSamples; + VkBool32 sampleShadingEnable; + float minSampleShading; + const VkSampleMask* pSampleMask; + VkBool32 alphaToCoverageEnable; + VkBool32 alphaToOneEnable; +} VkPipelineMultisampleStateCreateInfo; + +typedef struct VkStencilOpState { + VkStencilOp failOp; + VkStencilOp passOp; + VkStencilOp depthFailOp; + VkCompareOp compareOp; + uint32_t compareMask; + uint32_t writeMask; + uint32_t reference; +} VkStencilOpState; + +typedef struct VkPipelineDepthStencilStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDepthStencilStateCreateFlags flags; + VkBool32 depthTestEnable; + VkBool32 depthWriteEnable; + VkCompareOp depthCompareOp; + VkBool32 depthBoundsTestEnable; + VkBool32 stencilTestEnable; + VkStencilOpState front; + VkStencilOpState back; + float minDepthBounds; + float maxDepthBounds; +} VkPipelineDepthStencilStateCreateInfo; + +typedef struct VkPipelineColorBlendAttachmentState { + VkBool32 blendEnable; + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; + VkColorComponentFlags colorWriteMask; +} VkPipelineColorBlendAttachmentState; + +typedef struct VkPipelineColorBlendStateCreateInfo { + VkStructureType sType; + const void* pNext; 
+ VkPipelineColorBlendStateCreateFlags flags; + VkBool32 logicOpEnable; + VkLogicOp logicOp; + uint32_t attachmentCount; + const VkPipelineColorBlendAttachmentState* pAttachments; + float blendConstants[4]; +} VkPipelineColorBlendStateCreateInfo; + +typedef struct VkPipelineDynamicStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineDynamicStateCreateFlags flags; + uint32_t dynamicStateCount; + const VkDynamicState* pDynamicStates; +} VkPipelineDynamicStateCreateInfo; + +typedef struct VkGraphicsPipelineCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineInputAssemblyStateCreateInfo* pInputAssemblyState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; + const VkPipelineViewportStateCreateInfo* pViewportState; + const VkPipelineRasterizationStateCreateInfo* pRasterizationState; + const VkPipelineMultisampleStateCreateInfo* pMultisampleState; + const VkPipelineDepthStencilStateCreateInfo* pDepthStencilState; + const VkPipelineColorBlendStateCreateInfo* pColorBlendState; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkRenderPass renderPass; + uint32_t subpass; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkGraphicsPipelineCreateInfo; + +typedef struct VkPushConstantRange { + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; +} VkPushConstantRange; + +typedef struct VkPipelineLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineLayoutCreateFlags flags; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkPipelineLayoutCreateInfo; + +typedef struct VkSamplerCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerCreateFlags flags; + VkFilter magFilter; + VkFilter minFilter; + VkSamplerMipmapMode mipmapMode; + VkSamplerAddressMode addressModeU; + VkSamplerAddressMode addressModeV; + VkSamplerAddressMode addressModeW; + float mipLodBias; + VkBool32 anisotropyEnable; + float maxAnisotropy; + VkBool32 compareEnable; + VkCompareOp compareOp; + float minLod; + float maxLod; + VkBorderColor borderColor; + VkBool32 unnormalizedCoordinates; +} VkSamplerCreateInfo; + +typedef struct VkCopyDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet srcSet; + uint32_t srcBinding; + uint32_t srcArrayElement; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; +} VkCopyDescriptorSet; + +typedef struct VkDescriptorBufferInfo { + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize range; +} VkDescriptorBufferInfo; + +typedef struct VkDescriptorImageInfo { + VkSampler sampler; + VkImageView imageView; + VkImageLayout imageLayout; +} VkDescriptorImageInfo; + +typedef struct VkDescriptorPoolSize { + VkDescriptorType type; + uint32_t descriptorCount; +} VkDescriptorPoolSize; + +typedef struct VkDescriptorPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPoolCreateFlags flags; + uint32_t maxSets; + uint32_t poolSizeCount; + const VkDescriptorPoolSize* pPoolSizes; +} VkDescriptorPoolCreateInfo; + +typedef struct VkDescriptorSetAllocateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorPool descriptorPool; + uint32_t descriptorSetCount; + const 
VkDescriptorSetLayout* pSetLayouts; +} VkDescriptorSetAllocateInfo; + +typedef struct VkDescriptorSetLayoutBinding { + uint32_t binding; + VkDescriptorType descriptorType; + uint32_t descriptorCount; + VkShaderStageFlags stageFlags; + const VkSampler* pImmutableSamplers; +} VkDescriptorSetLayoutBinding; + +typedef struct VkDescriptorSetLayoutCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayoutCreateFlags flags; + uint32_t bindingCount; + const VkDescriptorSetLayoutBinding* pBindings; +} VkDescriptorSetLayoutCreateInfo; + +typedef struct VkWriteDescriptorSet { + VkStructureType sType; + const void* pNext; + VkDescriptorSet dstSet; + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + const VkDescriptorImageInfo* pImageInfo; + const VkDescriptorBufferInfo* pBufferInfo; + const VkBufferView* pTexelBufferView; +} VkWriteDescriptorSet; + +typedef struct VkAttachmentDescription { + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription; + +typedef struct VkAttachmentReference { + uint32_t attachment; + VkImageLayout layout; +} VkAttachmentReference; + +typedef struct VkFramebufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkFramebufferCreateFlags flags; + VkRenderPass renderPass; + uint32_t attachmentCount; + const VkImageView* pAttachments; + uint32_t width; + uint32_t height; + uint32_t layers; +} VkFramebufferCreateInfo; + +typedef struct VkSubpassDescription { + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t inputAttachmentCount; + const VkAttachmentReference* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference* pColorAttachments; + const VkAttachmentReference* pResolveAttachments; + const VkAttachmentReference* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription; + +typedef struct VkSubpassDependency { + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; +} VkSubpassDependency; + +typedef struct VkRenderPassCreateInfo { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency* pDependencies; +} VkRenderPassCreateInfo; + +typedef struct VkCommandPoolCreateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPoolCreateFlags flags; + uint32_t queueFamilyIndex; +} VkCommandPoolCreateInfo; + +typedef struct VkCommandBufferAllocateInfo { + VkStructureType sType; + const void* pNext; + VkCommandPool commandPool; + VkCommandBufferLevel level; + uint32_t commandBufferCount; +} VkCommandBufferAllocateInfo; + +typedef struct VkCommandBufferInheritanceInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + uint32_t subpass; + VkFramebuffer framebuffer; + VkBool32 occlusionQueryEnable; + VkQueryControlFlags queryFlags; + VkQueryPipelineStatisticFlags pipelineStatistics; +} 
VkCommandBufferInheritanceInfo; + +typedef struct VkCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + VkCommandBufferUsageFlags flags; + const VkCommandBufferInheritanceInfo* pInheritanceInfo; +} VkCommandBufferBeginInfo; + +typedef struct VkBufferCopy { + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy; + +typedef struct VkImageSubresourceLayers { + VkImageAspectFlags aspectMask; + uint32_t mipLevel; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkImageSubresourceLayers; + +typedef struct VkBufferImageCopy { + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy; + +typedef union VkClearColorValue { + float float32[4]; + int32_t int32[4]; + uint32_t uint32[4]; +} VkClearColorValue; + +typedef struct VkClearDepthStencilValue { + float depth; + uint32_t stencil; +} VkClearDepthStencilValue; + +typedef union VkClearValue { + VkClearColorValue color; + VkClearDepthStencilValue depthStencil; +} VkClearValue; + +typedef struct VkClearAttachment { + VkImageAspectFlags aspectMask; + uint32_t colorAttachment; + VkClearValue clearValue; +} VkClearAttachment; + +typedef struct VkClearRect { + VkRect2D rect; + uint32_t baseArrayLayer; + uint32_t layerCount; +} VkClearRect; + +typedef struct VkImageBlit { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit; + +typedef struct VkImageCopy { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy; + +typedef struct VkImageResolve { + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve; + +typedef struct VkRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + VkRenderPass renderPass; + VkFramebuffer framebuffer; + VkRect2D renderArea; + uint32_t clearValueCount; + const VkClearValue* pClearValues; +} VkRenderPassBeginInfo; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance); +typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* 
pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName); +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice); +typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); +typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory); +typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData); +typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); +typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements); +typedef void (VKAPI_PTR 
*PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); +typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore); +typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent); +typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); +typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool); +typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage); +typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout); +typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const 
VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView); +typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler); +typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets); +typedef VkResult (VKAPI_PTR 
*PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies); +typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer); +typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool); +typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); +typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers); +typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); +typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo); +typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); +typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); +typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); +typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges); +typedef void 
(VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); +typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( + const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance); + +VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( + VkInstance instance, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( + VkInstance instance, + uint32_t* pPhysicalDeviceCount, + VkPhysicalDevice* pPhysicalDevices); + +VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceFeatures( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkImageFormatProperties* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties* pMemoryProperties); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); + +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( + VkDevice device, + const char* pName); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( + VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( + VkDevice device, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( + VkPhysicalDevice physicalDevice, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( + VkDevice device, + uint32_t queueFamilyIndex, + uint32_t queueIndex, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo* pSubmits, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( + VkQueue queue); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( + VkDevice device, + const VkMemoryAllocateInfo* pAllocateInfo, + const VkAllocationCallbacks* pAllocator, + VkDeviceMemory* pMemory); + +VKAPI_ATTR void VKAPI_CALL vkFreeMemory( + VkDevice device, + VkDeviceMemory memory, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize offset, + VkDeviceSize size, + VkMemoryMapFlags flags, + void** ppData); + +VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( + VkDevice device, + VkDeviceMemory memory); + +VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + +VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); + 
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( + VkDevice device, + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( + VkDevice device, + VkBuffer buffer, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( + VkDevice device, + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( + VkDevice device, + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( + VkDevice device, + VkImage image, + VkMemoryRequirements* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( + VkDevice device, + VkImage image, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkSampleCountFlagBits samples, + VkImageUsageFlags usage, + VkImageTiling tiling, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( + VkQueue queue, + uint32_t bindInfoCount, + const VkBindSparseInfo* pBindInfo, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( + VkDevice device, + const VkFenceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFence( + VkDevice device, + VkFence fence, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( + VkDevice device, + VkFence fence); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( + VkDevice device, + uint32_t fenceCount, + const VkFence* pFences, + VkBool32 waitAll, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( + VkDevice device, + const VkSemaphoreCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSemaphore* pSemaphore); + +VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( + VkDevice device, + VkSemaphore semaphore, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( + VkDevice device, + const VkEventCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkEvent* pEvent); + +VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( + VkDevice device, + VkEvent event, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( + VkDevice device, + VkEvent event); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( + VkDevice device, + VkQueryPool queryPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + size_t dataSize, + void* pData, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( + VkDevice 
device, + const VkBufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBuffer* pBuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( + VkDevice device, + VkBuffer buffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( + VkDevice device, + const VkBufferViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( + VkDevice device, + VkBufferView bufferView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( + VkDevice device, + const VkImageCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImage* pImage); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImage( + VkDevice device, + VkImage image, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( + VkDevice device, + VkImage image, + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( + VkDevice device, + const VkImageViewCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkImageView* pView); + +VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( + VkDevice device, + VkImageView imageView, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( + VkDevice device, + VkShaderModule shaderModule, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( + VkDevice device, + const VkPipelineCacheCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineCache* pPipelineCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( + VkDevice device, + VkPipelineCache pipelineCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( + VkDevice device, + VkPipelineCache pipelineCache, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( + VkDevice device, + VkPipelineCache dstCache, + uint32_t srcCacheCount, + const VkPipelineCache* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkComputePipelineCreateInfo* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( + VkDevice device, + VkPipeline pipeline, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( + VkDevice device, + const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineLayout* pPipelineLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( + VkDevice device, + VkPipelineLayout pipelineLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( + VkDevice device, + const VkSamplerCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + 
VkSampler* pSampler); + +VKAPI_ATTR void VKAPI_CALL vkDestroySampler( + VkDevice device, + VkSampler sampler, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorSetLayout* pSetLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( + VkDevice device, + VkDescriptorSetLayout descriptorSetLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( + VkDevice device, + const VkDescriptorPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorPool* pDescriptorPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( + VkDevice device, + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( + VkDevice device, + VkDescriptorPool descriptorPool, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( + VkDevice device, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites, + uint32_t descriptorCopyCount, + const VkCopyDescriptorSet* pDescriptorCopies); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( + VkDevice device, + const VkFramebufferCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkFramebuffer* pFramebuffer); + +VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( + VkDevice device, + VkFramebuffer framebuffer, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( + VkDevice device, + const VkRenderPassCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( + VkDevice device, + VkRenderPass renderPass, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( + VkDevice device, + VkRenderPass renderPass, + VkExtent2D* pGranularity); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( + VkDevice device, + const VkCommandPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCommandPool* pCommandPool); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( + VkDevice device, + VkCommandPool commandPool, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); + +VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( + VkDevice device, + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( + VkDevice device, + VkCommandPool commandPool, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); + +VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( + VkCommandBuffer 
commandBuffer, + VkCommandBufferResetFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( + VkCommandBuffer commandBuffer, + uint32_t firstScissor, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( + VkCommandBuffer commandBuffer, + float lineWidth); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( + VkCommandBuffer commandBuffer, + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( + VkCommandBuffer commandBuffer, + const float blendConstants[4]); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( + VkCommandBuffer commandBuffer, + float minDepthBounds, + float maxDepthBounds); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t compareMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t writeMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + uint32_t reference); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t descriptorSetCount, + const VkDescriptorSet* pDescriptorSets, + uint32_t dynamicOffsetCount, + const uint32_t* pDynamicOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdDraw( + VkCommandBuffer commandBuffer, + uint32_t vertexCount, + uint32_t instanceCount, + uint32_t firstVertex, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( + VkCommandBuffer commandBuffer, + uint32_t indexCount, + uint32_t instanceCount, + uint32_t firstIndex, + int32_t vertexOffset, + uint32_t firstInstance); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const 
VkImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageBlit* pRegions, + VkFilter filter); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( + VkCommandBuffer commandBuffer, + VkBuffer srcBuffer, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkBuffer dstBuffer, + uint32_t regionCount, + const VkBufferImageCopy* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize dataSize, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( + VkCommandBuffer commandBuffer, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize size, + uint32_t data); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearColorValue* pColor, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( + VkCommandBuffer commandBuffer, + VkImage image, + VkImageLayout imageLayout, + const VkClearDepthStencilValue* pDepthStencil, + uint32_t rangeCount, + const VkImageSubresourceRange* pRanges); + +VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkClearAttachment* pAttachments, + uint32_t rectCount, + const VkClearRect* pRects); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( + VkCommandBuffer commandBuffer, + VkImage srcImage, + VkImageLayout srcImageLayout, + VkImage dstImage, + VkImageLayout dstImageLayout, + uint32_t regionCount, + const VkImageResolve* pRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags srcStageMask, + VkPipelineStageFlags dstStageMask, + VkDependencyFlags dependencyFlags, + uint32_t memoryBarrierCount, + const VkMemoryBarrier* pMemoryBarriers, + uint32_t bufferMemoryBarrierCount, + const VkBufferMemoryBarrier* pBufferMemoryBarriers, + uint32_t imageMemoryBarrierCount, + const VkImageMemoryBarrier* pImageMemoryBarriers); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( + VkCommandBuffer commandBuffer, + VkQueryPool 
queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + VkDeviceSize stride, + VkQueryResultFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( + VkCommandBuffer commandBuffer, + VkPipelineLayout layout, + VkShaderStageFlags stageFlags, + uint32_t offset, + uint32_t size, + const void* pValues); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( + VkCommandBuffer commandBuffer, + VkSubpassContents contents); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( + VkCommandBuffer commandBuffer, + uint32_t commandBufferCount, + const VkCommandBuffer* pCommandBuffers); +#endif + + +// VK_VERSION_1_1 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_1 1 +// Vulkan 1.1 version number +#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)// Patch version should always be set to 0 + +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate) +#define VK_MAX_DEVICE_GROUP_SIZE 32U +#define VK_LUID_SIZE 8U +#define VK_QUEUE_FAMILY_EXTERNAL (~1U) + +typedef enum VkPointClippingBehavior { + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1, + VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES, + VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY, + VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF +} VkPointClippingBehavior; + +typedef enum VkTessellationDomainOrigin { + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1, + VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT, + VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF +} VkTessellationDomainOrigin; + +typedef enum VkSamplerYcbcrModelConversion { + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020, + VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrModelConversion; + +typedef enum VkSamplerYcbcrRange { + VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1, + 
VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL, + VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW, + VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerYcbcrRange; + +typedef enum VkChromaLocation { + VK_CHROMA_LOCATION_COSITED_EVEN = 0, + VK_CHROMA_LOCATION_MIDPOINT = 1, + VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN, + VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT, + VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF +} VkChromaLocation; + +typedef enum VkDescriptorUpdateTemplateType { + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET, + VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorUpdateTemplateType; + +typedef enum VkSubgroupFeatureFlagBits { + VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001, + VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002, + VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004, + VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008, + VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010, + VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020, + VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040, + VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080, + VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100, + VK_SUBGROUP_FEATURE_ROTATE_BIT_KHR = 0x00000200, + VK_SUBGROUP_FEATURE_ROTATE_CLUSTERED_BIT_KHR = 0x00000400, + VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubgroupFeatureFlagBits; +typedef VkFlags VkSubgroupFeatureFlags; + +typedef enum VkPeerMemoryFeatureFlagBits { + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008, + VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT, + VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT, + VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT, + VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPeerMemoryFeatureFlagBits; +typedef VkFlags VkPeerMemoryFeatureFlags; + +typedef enum VkMemoryAllocateFlagBits { + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004, + VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT, + VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT, + VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkMemoryAllocateFlagBits; +typedef VkFlags VkMemoryAllocateFlags; +typedef VkFlags VkCommandPoolTrimFlags; +typedef VkFlags VkDescriptorUpdateTemplateCreateFlags; + +typedef enum VkExternalMemoryHandleTypeFlagBits { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020, + 
VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_SCREEN_BUFFER_BIT_QNX = 0x00004000, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBits; +typedef VkFlags VkExternalMemoryHandleTypeFlags; + +typedef enum VkExternalMemoryFeatureFlagBits { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBits; +typedef VkFlags VkExternalMemoryFeatureFlags; + +typedef enum VkExternalFenceHandleTypeFlagBits { + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceHandleTypeFlagBits; +typedef VkFlags VkExternalFenceHandleTypeFlags; + +typedef enum VkExternalFenceFeatureFlagBits { + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalFenceFeatureFlagBits; +typedef VkFlags VkExternalFenceFeatureFlags; + +typedef enum VkFenceImportFlagBits { + VK_FENCE_IMPORT_TEMPORARY_BIT = 
0x00000001, + VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT, + VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkFenceImportFlagBits; +typedef VkFlags VkFenceImportFlags; + +typedef enum VkSemaphoreImportFlagBits { + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001, + VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT, + VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreImportFlagBits; +typedef VkFlags VkSemaphoreImportFlags; + +typedef enum VkExternalSemaphoreHandleTypeFlagBits { + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT, + VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreHandleTypeFlagBits; +typedef VkFlags VkExternalSemaphoreHandleTypeFlags; + +typedef enum VkExternalSemaphoreFeatureFlagBits { + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002, + VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT, + VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkExternalSemaphoreFeatureFlagBits; +typedef VkFlags VkExternalSemaphoreFeatureFlags; +typedef struct VkPhysicalDeviceSubgroupProperties { + VkStructureType sType; + void* pNext; + uint32_t subgroupSize; + VkShaderStageFlags supportedStages; + VkSubgroupFeatureFlags supportedOperations; + VkBool32 quadOperationsInAllStages; +} VkPhysicalDeviceSubgroupProperties; + +typedef struct VkBindBufferMemoryInfo { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindBufferMemoryInfo; + +typedef struct VkBindImageMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; +} VkBindImageMemoryInfo; + +typedef struct VkPhysicalDevice16BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; +} VkPhysicalDevice16BitStorageFeatures; + +typedef struct VkMemoryDedicatedRequirements { + VkStructureType sType; + void* pNext; + VkBool32 prefersDedicatedAllocation; + VkBool32 requiresDedicatedAllocation; +} VkMemoryDedicatedRequirements; + +typedef struct VkMemoryDedicatedAllocateInfo { + VkStructureType sType; + const void* pNext; + VkImage image; + 
VkBuffer buffer; +} VkMemoryDedicatedAllocateInfo; + +typedef struct VkMemoryAllocateFlagsInfo { + VkStructureType sType; + const void* pNext; + VkMemoryAllocateFlags flags; + uint32_t deviceMask; +} VkMemoryAllocateFlagsInfo; + +typedef struct VkDeviceGroupRenderPassBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; + uint32_t deviceRenderAreaCount; + const VkRect2D* pDeviceRenderAreas; +} VkDeviceGroupRenderPassBeginInfo; + +typedef struct VkDeviceGroupCommandBufferBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceMask; +} VkDeviceGroupCommandBufferBeginInfo; + +typedef struct VkDeviceGroupSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const uint32_t* pWaitSemaphoreDeviceIndices; + uint32_t commandBufferCount; + const uint32_t* pCommandBufferDeviceMasks; + uint32_t signalSemaphoreCount; + const uint32_t* pSignalSemaphoreDeviceIndices; +} VkDeviceGroupSubmitInfo; + +typedef struct VkDeviceGroupBindSparseInfo { + VkStructureType sType; + const void* pNext; + uint32_t resourceDeviceIndex; + uint32_t memoryDeviceIndex; +} VkDeviceGroupBindSparseInfo; + +typedef struct VkBindBufferMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindBufferMemoryDeviceGroupInfo; + +typedef struct VkBindImageMemoryDeviceGroupInfo { + VkStructureType sType; + const void* pNext; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; + uint32_t splitInstanceBindRegionCount; + const VkRect2D* pSplitInstanceBindRegions; +} VkBindImageMemoryDeviceGroupInfo; + +typedef struct VkPhysicalDeviceGroupProperties { + VkStructureType sType; + void* pNext; + uint32_t physicalDeviceCount; + VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE]; + VkBool32 subsetAllocation; +} VkPhysicalDeviceGroupProperties; + +typedef struct VkDeviceGroupDeviceCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t physicalDeviceCount; + const VkPhysicalDevice* pPhysicalDevices; +} VkDeviceGroupDeviceCreateInfo; + +typedef struct VkBufferMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferMemoryRequirementsInfo2; + +typedef struct VkImageMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageMemoryRequirementsInfo2; + +typedef struct VkImageSparseMemoryRequirementsInfo2 { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageSparseMemoryRequirementsInfo2; + +typedef struct VkMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkMemoryRequirements memoryRequirements; +} VkMemoryRequirements2; + +typedef struct VkSparseImageMemoryRequirements2 { + VkStructureType sType; + void* pNext; + VkSparseImageMemoryRequirements memoryRequirements; +} VkSparseImageMemoryRequirements2; + +typedef struct VkPhysicalDeviceFeatures2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceFeatures features; +} VkPhysicalDeviceFeatures2; + +typedef struct VkPhysicalDeviceProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties properties; +} VkPhysicalDeviceProperties2; + +typedef struct VkFormatProperties2 { + VkStructureType sType; + void* pNext; + VkFormatProperties formatProperties; +} VkFormatProperties2; + +typedef struct VkImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkImageFormatProperties imageFormatProperties; +} VkImageFormatProperties2; + +typedef struct 
VkPhysicalDeviceImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkImageTiling tiling; + VkImageUsageFlags usage; + VkImageCreateFlags flags; +} VkPhysicalDeviceImageFormatInfo2; + +typedef struct VkQueueFamilyProperties2 { + VkStructureType sType; + void* pNext; + VkQueueFamilyProperties queueFamilyProperties; +} VkQueueFamilyProperties2; + +typedef struct VkPhysicalDeviceMemoryProperties2 { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceMemoryProperties memoryProperties; +} VkPhysicalDeviceMemoryProperties2; + +typedef struct VkSparseImageFormatProperties2 { + VkStructureType sType; + void* pNext; + VkSparseImageFormatProperties properties; +} VkSparseImageFormatProperties2; + +typedef struct VkPhysicalDeviceSparseImageFormatInfo2 { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkImageType type; + VkSampleCountFlagBits samples; + VkImageUsageFlags usage; + VkImageTiling tiling; +} VkPhysicalDeviceSparseImageFormatInfo2; + +typedef struct VkPhysicalDevicePointClippingProperties { + VkStructureType sType; + void* pNext; + VkPointClippingBehavior pointClippingBehavior; +} VkPhysicalDevicePointClippingProperties; + +typedef struct VkInputAttachmentAspectReference { + uint32_t subpass; + uint32_t inputAttachmentIndex; + VkImageAspectFlags aspectMask; +} VkInputAttachmentAspectReference; + +typedef struct VkRenderPassInputAttachmentAspectCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t aspectReferenceCount; + const VkInputAttachmentAspectReference* pAspectReferences; +} VkRenderPassInputAttachmentAspectCreateInfo; + +typedef struct VkImageViewUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags usage; +} VkImageViewUsageCreateInfo; + +typedef struct VkPipelineTessellationDomainOriginStateCreateInfo { + VkStructureType sType; + const void* pNext; + VkTessellationDomainOrigin domainOrigin; +} VkPipelineTessellationDomainOriginStateCreateInfo; + +typedef struct VkRenderPassMultiviewCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t subpassCount; + const uint32_t* pViewMasks; + uint32_t dependencyCount; + const int32_t* pViewOffsets; + uint32_t correlationMaskCount; + const uint32_t* pCorrelationMasks; +} VkRenderPassMultiviewCreateInfo; + +typedef struct VkPhysicalDeviceMultiviewFeatures { + VkStructureType sType; + void* pNext; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; +} VkPhysicalDeviceMultiviewFeatures; + +typedef struct VkPhysicalDeviceMultiviewProperties { + VkStructureType sType; + void* pNext; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; +} VkPhysicalDeviceMultiviewProperties; + +typedef struct VkPhysicalDeviceVariablePointersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; +} VkPhysicalDeviceVariablePointersFeatures; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 protectedMemory; +} VkPhysicalDeviceProtectedMemoryFeatures; + +typedef struct VkPhysicalDeviceProtectedMemoryProperties { + VkStructureType sType; + void* pNext; + VkBool32 protectedNoFault; +} VkPhysicalDeviceProtectedMemoryProperties; + +typedef struct VkDeviceQueueInfo2 { + VkStructureType sType; + const void* pNext; + VkDeviceQueueCreateFlags flags; + uint32_t 
queueFamilyIndex; + uint32_t queueIndex; +} VkDeviceQueueInfo2; + +typedef struct VkProtectedSubmitInfo { + VkStructureType sType; + const void* pNext; + VkBool32 protectedSubmit; +} VkProtectedSubmitInfo; + +typedef struct VkSamplerYcbcrConversionCreateInfo { + VkStructureType sType; + const void* pNext; + VkFormat format; + VkSamplerYcbcrModelConversion ycbcrModel; + VkSamplerYcbcrRange ycbcrRange; + VkComponentMapping components; + VkChromaLocation xChromaOffset; + VkChromaLocation yChromaOffset; + VkFilter chromaFilter; + VkBool32 forceExplicitReconstruction; +} VkSamplerYcbcrConversionCreateInfo; + +typedef struct VkSamplerYcbcrConversionInfo { + VkStructureType sType; + const void* pNext; + VkSamplerYcbcrConversion conversion; +} VkSamplerYcbcrConversionInfo; + +typedef struct VkBindImagePlaneMemoryInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkBindImagePlaneMemoryInfo; + +typedef struct VkImagePlaneMemoryRequirementsInfo { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits planeAspect; +} VkImagePlaneMemoryRequirementsInfo; + +typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures { + VkStructureType sType; + void* pNext; + VkBool32 samplerYcbcrConversion; +} VkPhysicalDeviceSamplerYcbcrConversionFeatures; + +typedef struct VkSamplerYcbcrConversionImageFormatProperties { + VkStructureType sType; + void* pNext; + uint32_t combinedImageSamplerDescriptorCount; +} VkSamplerYcbcrConversionImageFormatProperties; + +typedef struct VkDescriptorUpdateTemplateEntry { + uint32_t dstBinding; + uint32_t dstArrayElement; + uint32_t descriptorCount; + VkDescriptorType descriptorType; + size_t offset; + size_t stride; +} VkDescriptorUpdateTemplateEntry; + +typedef struct VkDescriptorUpdateTemplateCreateInfo { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplateCreateFlags flags; + uint32_t descriptorUpdateEntryCount; + const VkDescriptorUpdateTemplateEntry* pDescriptorUpdateEntries; + VkDescriptorUpdateTemplateType templateType; + VkDescriptorSetLayout descriptorSetLayout; + VkPipelineBindPoint pipelineBindPoint; + VkPipelineLayout pipelineLayout; + uint32_t set; +} VkDescriptorUpdateTemplateCreateInfo; + +typedef struct VkExternalMemoryProperties { + VkExternalMemoryFeatureFlags externalMemoryFeatures; + VkExternalMemoryHandleTypeFlags exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlags compatibleHandleTypes; +} VkExternalMemoryProperties; + +typedef struct VkPhysicalDeviceExternalImageFormatInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalImageFormatInfo; + +typedef struct VkExternalImageFormatProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalImageFormatProperties; + +typedef struct VkPhysicalDeviceExternalBufferInfo { + VkStructureType sType; + const void* pNext; + VkBufferCreateFlags flags; + VkBufferUsageFlags usage; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalBufferInfo; + +typedef struct VkExternalBufferProperties { + VkStructureType sType; + void* pNext; + VkExternalMemoryProperties externalMemoryProperties; +} VkExternalBufferProperties; + +typedef struct VkPhysicalDeviceIDProperties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; +} 
VkPhysicalDeviceIDProperties; + +typedef struct VkExternalMemoryImageCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryImageCreateInfo; + +typedef struct VkExternalMemoryBufferCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExternalMemoryBufferCreateInfo; + +typedef struct VkExportMemoryAllocateInfo { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlags handleTypes; +} VkExportMemoryAllocateInfo; + +typedef struct VkPhysicalDeviceExternalFenceInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalFenceInfo; + +typedef struct VkExternalFenceProperties { + VkStructureType sType; + void* pNext; + VkExternalFenceHandleTypeFlags exportFromImportedHandleTypes; + VkExternalFenceHandleTypeFlags compatibleHandleTypes; + VkExternalFenceFeatureFlags externalFenceFeatures; +} VkExternalFenceProperties; + +typedef struct VkExportFenceCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalFenceHandleTypeFlags handleTypes; +} VkExportFenceCreateInfo; + +typedef struct VkExportSemaphoreCreateInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlags handleTypes; +} VkExportSemaphoreCreateInfo; + +typedef struct VkPhysicalDeviceExternalSemaphoreInfo { + VkStructureType sType; + const void* pNext; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkPhysicalDeviceExternalSemaphoreInfo; + +typedef struct VkExternalSemaphoreProperties { + VkStructureType sType; + void* pNext; + VkExternalSemaphoreHandleTypeFlags exportFromImportedHandleTypes; + VkExternalSemaphoreHandleTypeFlags compatibleHandleTypes; + VkExternalSemaphoreFeatureFlags externalSemaphoreFeatures; +} VkExternalSemaphoreProperties; + +typedef struct VkPhysicalDeviceMaintenance3Properties { + VkStructureType sType; + void* pNext; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceMaintenance3Properties; + +typedef struct VkDescriptorSetLayoutSupport { + VkStructureType sType; + void* pNext; + VkBool32 supported; +} VkDescriptorSetLayoutSupport; + +typedef struct VkPhysicalDeviceShaderDrawParametersFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceShaderDrawParametersFeatures; + +typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion); +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, 
VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); +typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue); +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* 
pExternalFenceProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion( + uint32_t* pApiVersion); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool( + VkDevice device, + VkCommandPool 
commandPool, + VkCommandPoolTrimFlags flags); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2( + VkDevice device, + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_VERSION_1_2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VERSION_1_2 1 +// Vulkan 1.2 version number +#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0 + +#define VK_MAX_DRIVER_NAME_SIZE 256U +#define VK_MAX_DRIVER_INFO_SIZE 256U + +typedef enum VkDriverId { + VK_DRIVER_ID_AMD_PROPRIETARY = 1, + VK_DRIVER_ID_AMD_OPEN_SOURCE = 2, + VK_DRIVER_ID_MESA_RADV = 3, + VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8, + VK_DRIVER_ID_ARM_PROPRIETARY = 9, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10, + VK_DRIVER_ID_GGP_PROPRIETARY = 11, + VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12, + VK_DRIVER_ID_MESA_LLVMPIPE = 13, + VK_DRIVER_ID_MOLTENVK = 14, + VK_DRIVER_ID_COREAVI_PROPRIETARY = 15, + VK_DRIVER_ID_JUICE_PROPRIETARY = 16, + VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17, + VK_DRIVER_ID_MESA_TURNIP = 18, + VK_DRIVER_ID_MESA_V3DV = 19, + VK_DRIVER_ID_MESA_PANVK = 20, + VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21, + VK_DRIVER_ID_MESA_VENUS = 22, + VK_DRIVER_ID_MESA_DOZEN = 23, + VK_DRIVER_ID_MESA_NVK = 24, + VK_DRIVER_ID_IMAGINATION_OPEN_SOURCE_MESA = 25, + VK_DRIVER_ID_MESA_HONEYKRISP = 26, + VK_DRIVER_ID_RESERVED_27 = 27, + VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY, + VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE, + VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV, + VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY, + VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS, + VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA, + VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY, + VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY, + VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY, + VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER, + VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY, + VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY, + VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF +} VkDriverId; + +typedef enum VkShaderFloatControlsIndependence { + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE, + VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF +} VkShaderFloatControlsIndependence; + +typedef enum VkSamplerReductionMode { + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0, + VK_SAMPLER_REDUCTION_MODE_MIN = 1, + VK_SAMPLER_REDUCTION_MODE_MAX = 2, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_RANGECLAMP_QCOM = 1000521000, + VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE, + VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN, + VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX, + VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF +} VkSamplerReductionMode; + +typedef enum VkSemaphoreType { + VK_SEMAPHORE_TYPE_BINARY = 0, + VK_SEMAPHORE_TYPE_TIMELINE = 1, + VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY, + VK_SEMAPHORE_TYPE_TIMELINE_KHR = 
VK_SEMAPHORE_TYPE_TIMELINE, + VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreType; + +typedef enum VkResolveModeFlagBits { + VK_RESOLVE_MODE_NONE = 0, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001, + VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002, + VK_RESOLVE_MODE_MIN_BIT = 0x00000004, + VK_RESOLVE_MODE_MAX_BIT = 0x00000008, + VK_RESOLVE_MODE_EXTERNAL_FORMAT_DOWNSAMPLE_ANDROID = 0x00000010, + VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE, + VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT, + VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT, + VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT, + VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT, + VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkResolveModeFlagBits; +typedef VkFlags VkResolveModeFlags; + +typedef enum VkDescriptorBindingFlagBits { + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008, + VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, + VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, + VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT, + VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT, + VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkDescriptorBindingFlagBits; +typedef VkFlags VkDescriptorBindingFlags; + +typedef enum VkSemaphoreWaitFlagBits { + VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001, + VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT, + VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSemaphoreWaitFlagBits; +typedef VkFlags VkSemaphoreWaitFlags; +typedef struct VkPhysicalDeviceVulkan11Features { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer16BitAccess; + VkBool32 uniformAndStorageBuffer16BitAccess; + VkBool32 storagePushConstant16; + VkBool32 storageInputOutput16; + VkBool32 multiview; + VkBool32 multiviewGeometryShader; + VkBool32 multiviewTessellationShader; + VkBool32 variablePointersStorageBuffer; + VkBool32 variablePointers; + VkBool32 protectedMemory; + VkBool32 samplerYcbcrConversion; + VkBool32 shaderDrawParameters; +} VkPhysicalDeviceVulkan11Features; + +typedef struct VkPhysicalDeviceVulkan11Properties { + VkStructureType sType; + void* pNext; + uint8_t deviceUUID[VK_UUID_SIZE]; + uint8_t driverUUID[VK_UUID_SIZE]; + uint8_t deviceLUID[VK_LUID_SIZE]; + uint32_t deviceNodeMask; + VkBool32 deviceLUIDValid; + uint32_t subgroupSize; + VkShaderStageFlags subgroupSupportedStages; + VkSubgroupFeatureFlags subgroupSupportedOperations; + VkBool32 subgroupQuadOperationsInAllStages; + VkPointClippingBehavior pointClippingBehavior; + uint32_t maxMultiviewViewCount; + uint32_t maxMultiviewInstanceIndex; + VkBool32 protectedNoFault; + uint32_t maxPerSetDescriptors; + VkDeviceSize maxMemoryAllocationSize; +} VkPhysicalDeviceVulkan11Properties; + +typedef struct VkPhysicalDeviceVulkan12Features { + VkStructureType sType; + void* pNext; + VkBool32 samplerMirrorClampToEdge; + VkBool32 drawIndirectCount; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; + VkBool32 shaderFloat16; + VkBool32 
shaderInt8; + VkBool32 descriptorIndexing; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; + VkBool32 samplerFilterMinmax; + VkBool32 scalarBlockLayout; + VkBool32 imagelessFramebuffer; + VkBool32 uniformBufferStandardLayout; + VkBool32 shaderSubgroupExtendedTypes; + VkBool32 separateDepthStencilLayouts; + VkBool32 hostQueryReset; + VkBool32 timelineSemaphore; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; + VkBool32 shaderOutputViewportIndex; + VkBool32 shaderOutputLayer; + VkBool32 subgroupBroadcastDynamicId; +} VkPhysicalDeviceVulkan12Features; + +typedef struct VkConformanceVersion { + uint8_t major; + uint8_t minor; + uint8_t subminor; + uint8_t patch; +} VkConformanceVersion; + +typedef struct VkPhysicalDeviceVulkan12Properties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t 
maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; + uint64_t maxTimelineSemaphoreValueDifference; + VkSampleCountFlags framebufferIntegerColorSampleCounts; +} VkPhysicalDeviceVulkan12Properties; + +typedef struct VkImageFormatListCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkImageFormatListCreateInfo; + +typedef struct VkAttachmentDescription2 { + VkStructureType sType; + const void* pNext; + VkAttachmentDescriptionFlags flags; + VkFormat format; + VkSampleCountFlagBits samples; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkAttachmentLoadOp stencilLoadOp; + VkAttachmentStoreOp stencilStoreOp; + VkImageLayout initialLayout; + VkImageLayout finalLayout; +} VkAttachmentDescription2; + +typedef struct VkAttachmentReference2 { + VkStructureType sType; + const void* pNext; + uint32_t attachment; + VkImageLayout layout; + VkImageAspectFlags aspectMask; +} VkAttachmentReference2; + +typedef struct VkSubpassDescription2 { + VkStructureType sType; + const void* pNext; + VkSubpassDescriptionFlags flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t viewMask; + uint32_t inputAttachmentCount; + const VkAttachmentReference2* pInputAttachments; + uint32_t colorAttachmentCount; + const VkAttachmentReference2* pColorAttachments; + const VkAttachmentReference2* pResolveAttachments; + const VkAttachmentReference2* pDepthStencilAttachment; + uint32_t preserveAttachmentCount; + const uint32_t* pPreserveAttachments; +} VkSubpassDescription2; + +typedef struct VkSubpassDependency2 { + VkStructureType sType; + const void* pNext; + uint32_t srcSubpass; + uint32_t dstSubpass; + VkPipelineStageFlags srcStageMask; + VkPipelineStageFlags dstStageMask; + VkAccessFlags srcAccessMask; + VkAccessFlags dstAccessMask; + VkDependencyFlags dependencyFlags; + int32_t viewOffset; +} VkSubpassDependency2; + +typedef struct VkRenderPassCreateInfo2 { + VkStructureType sType; + const void* pNext; + VkRenderPassCreateFlags flags; + uint32_t attachmentCount; + const VkAttachmentDescription2* pAttachments; + uint32_t subpassCount; + const VkSubpassDescription2* pSubpasses; + uint32_t dependencyCount; + const VkSubpassDependency2* pDependencies; + uint32_t correlatedViewMaskCount; + const uint32_t* pCorrelatedViewMasks; +} VkRenderPassCreateInfo2; + +typedef struct VkSubpassBeginInfo { + VkStructureType sType; + const void* pNext; + VkSubpassContents contents; +} VkSubpassBeginInfo; + +typedef struct VkSubpassEndInfo { + VkStructureType sType; + const void* pNext; +} VkSubpassEndInfo; 
+ +typedef struct VkPhysicalDevice8BitStorageFeatures { + VkStructureType sType; + void* pNext; + VkBool32 storageBuffer8BitAccess; + VkBool32 uniformAndStorageBuffer8BitAccess; + VkBool32 storagePushConstant8; +} VkPhysicalDevice8BitStorageFeatures; + +typedef struct VkPhysicalDeviceDriverProperties { + VkStructureType sType; + void* pNext; + VkDriverId driverID; + char driverName[VK_MAX_DRIVER_NAME_SIZE]; + char driverInfo[VK_MAX_DRIVER_INFO_SIZE]; + VkConformanceVersion conformanceVersion; +} VkPhysicalDeviceDriverProperties; + +typedef struct VkPhysicalDeviceShaderAtomicInt64Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferInt64Atomics; + VkBool32 shaderSharedInt64Atomics; +} VkPhysicalDeviceShaderAtomicInt64Features; + +typedef struct VkPhysicalDeviceShaderFloat16Int8Features { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16; + VkBool32 shaderInt8; +} VkPhysicalDeviceShaderFloat16Int8Features; + +typedef struct VkPhysicalDeviceFloatControlsProperties { + VkStructureType sType; + void* pNext; + VkShaderFloatControlsIndependence denormBehaviorIndependence; + VkShaderFloatControlsIndependence roundingModeIndependence; + VkBool32 shaderSignedZeroInfNanPreserveFloat16; + VkBool32 shaderSignedZeroInfNanPreserveFloat32; + VkBool32 shaderSignedZeroInfNanPreserveFloat64; + VkBool32 shaderDenormPreserveFloat16; + VkBool32 shaderDenormPreserveFloat32; + VkBool32 shaderDenormPreserveFloat64; + VkBool32 shaderDenormFlushToZeroFloat16; + VkBool32 shaderDenormFlushToZeroFloat32; + VkBool32 shaderDenormFlushToZeroFloat64; + VkBool32 shaderRoundingModeRTEFloat16; + VkBool32 shaderRoundingModeRTEFloat32; + VkBool32 shaderRoundingModeRTEFloat64; + VkBool32 shaderRoundingModeRTZFloat16; + VkBool32 shaderRoundingModeRTZFloat32; + VkBool32 shaderRoundingModeRTZFloat64; +} VkPhysicalDeviceFloatControlsProperties; + +typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t bindingCount; + const VkDescriptorBindingFlags* pBindingFlags; +} VkDescriptorSetLayoutBindingFlagsCreateInfo; + +typedef struct VkPhysicalDeviceDescriptorIndexingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderInputAttachmentArrayDynamicIndexing; + VkBool32 shaderUniformTexelBufferArrayDynamicIndexing; + VkBool32 shaderStorageTexelBufferArrayDynamicIndexing; + VkBool32 shaderUniformBufferArrayNonUniformIndexing; + VkBool32 shaderSampledImageArrayNonUniformIndexing; + VkBool32 shaderStorageBufferArrayNonUniformIndexing; + VkBool32 shaderStorageImageArrayNonUniformIndexing; + VkBool32 shaderInputAttachmentArrayNonUniformIndexing; + VkBool32 shaderUniformTexelBufferArrayNonUniformIndexing; + VkBool32 shaderStorageTexelBufferArrayNonUniformIndexing; + VkBool32 descriptorBindingUniformBufferUpdateAfterBind; + VkBool32 descriptorBindingSampledImageUpdateAfterBind; + VkBool32 descriptorBindingStorageImageUpdateAfterBind; + VkBool32 descriptorBindingStorageBufferUpdateAfterBind; + VkBool32 descriptorBindingUniformTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingStorageTexelBufferUpdateAfterBind; + VkBool32 descriptorBindingUpdateUnusedWhilePending; + VkBool32 descriptorBindingPartiallyBound; + VkBool32 descriptorBindingVariableDescriptorCount; + VkBool32 runtimeDescriptorArray; +} VkPhysicalDeviceDescriptorIndexingFeatures; + +typedef struct VkPhysicalDeviceDescriptorIndexingProperties { + VkStructureType sType; + void* pNext; + uint32_t maxUpdateAfterBindDescriptorsInAllPools; + VkBool32 
shaderUniformBufferArrayNonUniformIndexingNative; + VkBool32 shaderSampledImageArrayNonUniformIndexingNative; + VkBool32 shaderStorageBufferArrayNonUniformIndexingNative; + VkBool32 shaderStorageImageArrayNonUniformIndexingNative; + VkBool32 shaderInputAttachmentArrayNonUniformIndexingNative; + VkBool32 robustBufferAccessUpdateAfterBind; + VkBool32 quadDivergentImplicitLod; + uint32_t maxPerStageDescriptorUpdateAfterBindSamplers; + uint32_t maxPerStageDescriptorUpdateAfterBindUniformBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageBuffers; + uint32_t maxPerStageDescriptorUpdateAfterBindSampledImages; + uint32_t maxPerStageDescriptorUpdateAfterBindStorageImages; + uint32_t maxPerStageDescriptorUpdateAfterBindInputAttachments; + uint32_t maxPerStageUpdateAfterBindResources; + uint32_t maxDescriptorSetUpdateAfterBindSamplers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffers; + uint32_t maxDescriptorSetUpdateAfterBindUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffers; + uint32_t maxDescriptorSetUpdateAfterBindStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindSampledImages; + uint32_t maxDescriptorSetUpdateAfterBindStorageImages; + uint32_t maxDescriptorSetUpdateAfterBindInputAttachments; +} VkPhysicalDeviceDescriptorIndexingProperties; + +typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo { + VkStructureType sType; + const void* pNext; + uint32_t descriptorSetCount; + const uint32_t* pDescriptorCounts; +} VkDescriptorSetVariableDescriptorCountAllocateInfo; + +typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport { + VkStructureType sType; + void* pNext; + uint32_t maxVariableDescriptorCount; +} VkDescriptorSetVariableDescriptorCountLayoutSupport; + +typedef struct VkSubpassDescriptionDepthStencilResolve { + VkStructureType sType; + const void* pNext; + VkResolveModeFlagBits depthResolveMode; + VkResolveModeFlagBits stencilResolveMode; + const VkAttachmentReference2* pDepthStencilResolveAttachment; +} VkSubpassDescriptionDepthStencilResolve; + +typedef struct VkPhysicalDeviceDepthStencilResolveProperties { + VkStructureType sType; + void* pNext; + VkResolveModeFlags supportedDepthResolveModes; + VkResolveModeFlags supportedStencilResolveModes; + VkBool32 independentResolveNone; + VkBool32 independentResolve; +} VkPhysicalDeviceDepthStencilResolveProperties; + +typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 scalarBlockLayout; +} VkPhysicalDeviceScalarBlockLayoutFeatures; + +typedef struct VkImageStencilUsageCreateInfo { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags stencilUsage; +} VkImageStencilUsageCreateInfo; + +typedef struct VkSamplerReductionModeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSamplerReductionMode reductionMode; +} VkSamplerReductionModeCreateInfo; + +typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties { + VkStructureType sType; + void* pNext; + VkBool32 filterMinmaxSingleComponentFormats; + VkBool32 filterMinmaxImageComponentMapping; +} VkPhysicalDeviceSamplerFilterMinmaxProperties; + +typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures { + VkStructureType sType; + void* pNext; + VkBool32 vulkanMemoryModel; + VkBool32 vulkanMemoryModelDeviceScope; + VkBool32 vulkanMemoryModelAvailabilityVisibilityChains; +} VkPhysicalDeviceVulkanMemoryModelFeatures; + +typedef struct VkPhysicalDeviceImagelessFramebufferFeatures { + VkStructureType sType; + void* pNext; + VkBool32 
imagelessFramebuffer; +} VkPhysicalDeviceImagelessFramebufferFeatures; + +typedef struct VkFramebufferAttachmentImageInfo { + VkStructureType sType; + const void* pNext; + VkImageCreateFlags flags; + VkImageUsageFlags usage; + uint32_t width; + uint32_t height; + uint32_t layerCount; + uint32_t viewFormatCount; + const VkFormat* pViewFormats; +} VkFramebufferAttachmentImageInfo; + +typedef struct VkFramebufferAttachmentsCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentImageInfoCount; + const VkFramebufferAttachmentImageInfo* pAttachmentImageInfos; +} VkFramebufferAttachmentsCreateInfo; + +typedef struct VkRenderPassAttachmentBeginInfo { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkImageView* pAttachments; +} VkRenderPassAttachmentBeginInfo; + +typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures { + VkStructureType sType; + void* pNext; + VkBool32 uniformBufferStandardLayout; +} VkPhysicalDeviceUniformBufferStandardLayoutFeatures; + +typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupExtendedTypes; +} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures; + +typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures { + VkStructureType sType; + void* pNext; + VkBool32 separateDepthStencilLayouts; +} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures; + +typedef struct VkAttachmentReferenceStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilLayout; +} VkAttachmentReferenceStencilLayout; + +typedef struct VkAttachmentDescriptionStencilLayout { + VkStructureType sType; + void* pNext; + VkImageLayout stencilInitialLayout; + VkImageLayout stencilFinalLayout; +} VkAttachmentDescriptionStencilLayout; + +typedef struct VkPhysicalDeviceHostQueryResetFeatures { + VkStructureType sType; + void* pNext; + VkBool32 hostQueryReset; +} VkPhysicalDeviceHostQueryResetFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures { + VkStructureType sType; + void* pNext; + VkBool32 timelineSemaphore; +} VkPhysicalDeviceTimelineSemaphoreFeatures; + +typedef struct VkPhysicalDeviceTimelineSemaphoreProperties { + VkStructureType sType; + void* pNext; + uint64_t maxTimelineSemaphoreValueDifference; +} VkPhysicalDeviceTimelineSemaphoreProperties; + +typedef struct VkSemaphoreTypeCreateInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreType semaphoreType; + uint64_t initialValue; +} VkSemaphoreTypeCreateInfo; + +typedef struct VkTimelineSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValueCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValueCount; + const uint64_t* pSignalSemaphoreValues; +} VkTimelineSemaphoreSubmitInfo; + +typedef struct VkSemaphoreWaitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphoreWaitFlags flags; + uint32_t semaphoreCount; + const VkSemaphore* pSemaphores; + const uint64_t* pValues; +} VkSemaphoreWaitInfo; + +typedef struct VkSemaphoreSignalInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; +} VkSemaphoreSignalInfo; + +typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeatures; + +typedef struct VkBufferDeviceAddressInfo { + 
VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferDeviceAddressInfo; + +typedef struct VkBufferOpaqueCaptureAddressCreateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkBufferOpaqueCaptureAddressCreateInfo; + +typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo { + VkStructureType sType; + const void* pNext; + uint64_t opaqueCaptureAddress; +} VkMemoryOpaqueCaptureAddressAllocateInfo; + +typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; +} VkDeviceMemoryOpaqueCaptureAddressInfo; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2( + VkCommandBuffer 
commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkResetQueryPool( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_VERSION_1_3 is a preprocessor guard. Do not pass it to API calls. +#define VK_VERSION_1_3 1 +// Vulkan 1.3 version number +#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0 + +typedef uint64_t VkFlags64; +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot) + +typedef enum VkPipelineCreationFeedbackFlagBits { + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004, + VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT, + VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT, + VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT, + VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkPipelineCreationFeedbackFlagBits; +typedef VkFlags VkPipelineCreationFeedbackFlags; + +typedef enum VkToolPurposeFlagBits { + VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001, + VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002, + VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010, + VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020, + VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040, + VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT, + VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT, + VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT, + VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT, + VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT, + VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkToolPurposeFlagBits; +typedef VkFlags VkToolPurposeFlags; +typedef VkFlags VkPrivateDataSlotCreateFlags; +typedef VkFlags64 VkPipelineStageFlags2; + +// Flag bits for VkPipelineStageFlagBits2 +typedef VkFlags64 VkPipelineStageFlagBits2; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL; +static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL; +static const VkPipelineStageFlagBits2 
VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_EXT = 0x00020000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADER_BIT_HUAWEI = 0x8000000000ULL; +// VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI is a deprecated alias +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL; +static 
const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLUSTER_CULLING_SHADER_BIT_HUAWEI = 0x20000000000ULL; +static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL; + +typedef VkFlags64 VkAccessFlags2; + +// Flag bits for VkAccessFlagBits2 +typedef VkFlags64 VkAccessFlagBits2; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL; +static const VkAccessFlagBits2 
VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_EXT = 0x00020000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_EXT = 0x00040000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL; +static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL; + + +typedef enum VkSubmitFlagBits { + VK_SUBMIT_PROTECTED_BIT = 0x00000001, + VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT, + VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkSubmitFlagBits; +typedef VkFlags VkSubmitFlags; + +typedef enum VkRenderingFlagBits { + VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001, + VK_RENDERING_SUSPENDING_BIT = 0x00000002, + VK_RENDERING_RESUMING_BIT = 0x00000004, + VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008, + VK_RENDERING_CONTENTS_INLINE_BIT_KHR = 0x00000010, + 
VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT, + VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT, + VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT, + VK_RENDERING_CONTENTS_INLINE_BIT_EXT = VK_RENDERING_CONTENTS_INLINE_BIT_KHR, + VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF +} VkRenderingFlagBits; +typedef VkFlags VkRenderingFlags; +typedef VkFlags64 VkFormatFeatureFlags2; + +// Flag bits for VkFormatFeatureFlagBits2 +typedef VkFlags64 VkFormatFeatureFlagBits2; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 
VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_HOST_IMAGE_TRANSFER_BIT_EXT = 0x400000000000ULL; +static const 
VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL; +static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL; + +typedef struct VkPhysicalDeviceVulkan13Features { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; + VkBool32 pipelineCreationCacheControl; + VkBool32 privateData; + VkBool32 shaderDemoteToHelperInvocation; + VkBool32 shaderTerminateInvocation; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; + VkBool32 synchronization2; + VkBool32 textureCompressionASTC_HDR; + VkBool32 shaderZeroInitializeWorkgroupMemory; + VkBool32 dynamicRendering; + VkBool32 shaderIntegerDotProduct; + VkBool32 maintenance4; +} VkPhysicalDeviceVulkan13Features; + +typedef struct VkPhysicalDeviceVulkan13Properties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; + uint32_t maxInlineUniformTotalSize; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 
integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceVulkan13Properties; + +typedef struct VkPipelineCreationFeedback { + VkPipelineCreationFeedbackFlags flags; + uint64_t duration; +} VkPipelineCreationFeedback; + +typedef struct VkPipelineCreationFeedbackCreateInfo { + VkStructureType sType; + const void* pNext; + VkPipelineCreationFeedback* pPipelineCreationFeedback; + uint32_t pipelineStageCreationFeedbackCount; + VkPipelineCreationFeedback* pPipelineStageCreationFeedbacks; +} VkPipelineCreationFeedbackCreateInfo; + +typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderTerminateInvocation; +} VkPhysicalDeviceShaderTerminateInvocationFeatures; + +typedef struct VkPhysicalDeviceToolProperties { + VkStructureType sType; + void* pNext; + char name[VK_MAX_EXTENSION_NAME_SIZE]; + char version[VK_MAX_EXTENSION_NAME_SIZE]; + VkToolPurposeFlags purposes; + char description[VK_MAX_DESCRIPTION_SIZE]; + char layer[VK_MAX_EXTENSION_NAME_SIZE]; +} VkPhysicalDeviceToolProperties; + +typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderDemoteToHelperInvocation; +} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures; + +typedef struct VkPhysicalDevicePrivateDataFeatures { + VkStructureType sType; + void* pNext; + VkBool32 privateData; +} VkPhysicalDevicePrivateDataFeatures; + +typedef struct VkDevicePrivateDataCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t privateDataSlotRequestCount; +} VkDevicePrivateDataCreateInfo; + +typedef struct VkPrivateDataSlotCreateInfo { + VkStructureType sType; + const void* pNext; + VkPrivateDataSlotCreateFlags flags; +} VkPrivateDataSlotCreateInfo; + +typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 pipelineCreationCacheControl; +} VkPhysicalDevicePipelineCreationCacheControlFeatures; + +typedef struct VkMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; +} VkMemoryBarrier2; + +typedef struct VkBufferMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkBuffer buffer; + VkDeviceSize offset; 
+ VkDeviceSize size; +} VkBufferMemoryBarrier2; + +typedef struct VkImageMemoryBarrier2 { + VkStructureType sType; + const void* pNext; + VkPipelineStageFlags2 srcStageMask; + VkAccessFlags2 srcAccessMask; + VkPipelineStageFlags2 dstStageMask; + VkAccessFlags2 dstAccessMask; + VkImageLayout oldLayout; + VkImageLayout newLayout; + uint32_t srcQueueFamilyIndex; + uint32_t dstQueueFamilyIndex; + VkImage image; + VkImageSubresourceRange subresourceRange; +} VkImageMemoryBarrier2; + +typedef struct VkDependencyInfo { + VkStructureType sType; + const void* pNext; + VkDependencyFlags dependencyFlags; + uint32_t memoryBarrierCount; + const VkMemoryBarrier2* pMemoryBarriers; + uint32_t bufferMemoryBarrierCount; + const VkBufferMemoryBarrier2* pBufferMemoryBarriers; + uint32_t imageMemoryBarrierCount; + const VkImageMemoryBarrier2* pImageMemoryBarriers; +} VkDependencyInfo; + +typedef struct VkSemaphoreSubmitInfo { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + uint64_t value; + VkPipelineStageFlags2 stageMask; + uint32_t deviceIndex; +} VkSemaphoreSubmitInfo; + +typedef struct VkCommandBufferSubmitInfo { + VkStructureType sType; + const void* pNext; + VkCommandBuffer commandBuffer; + uint32_t deviceMask; +} VkCommandBufferSubmitInfo; + +typedef struct VkSubmitInfo2 { + VkStructureType sType; + const void* pNext; + VkSubmitFlags flags; + uint32_t waitSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pWaitSemaphoreInfos; + uint32_t commandBufferInfoCount; + const VkCommandBufferSubmitInfo* pCommandBufferInfos; + uint32_t signalSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pSignalSemaphoreInfos; +} VkSubmitInfo2; + +typedef struct VkPhysicalDeviceSynchronization2Features { + VkStructureType sType; + void* pNext; + VkBool32 synchronization2; +} VkPhysicalDeviceSynchronization2Features; + +typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderZeroInitializeWorkgroupMemory; +} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures; + +typedef struct VkPhysicalDeviceImageRobustnessFeatures { + VkStructureType sType; + void* pNext; + VkBool32 robustImageAccess; +} VkPhysicalDeviceImageRobustnessFeatures; + +typedef struct VkBufferCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize srcOffset; + VkDeviceSize dstOffset; + VkDeviceSize size; +} VkBufferCopy2; + +typedef struct VkCopyBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferCopy2* pRegions; +} VkCopyBufferInfo2; + +typedef struct VkImageCopy2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageCopy2; + +typedef struct VkCopyImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageInfo2; + +typedef struct VkBufferImageCopy2 { + VkStructureType sType; + const void* pNext; + VkDeviceSize bufferOffset; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkBufferImageCopy2; + +typedef struct VkCopyBufferToImageInfo2 { + VkStructureType sType; + const void* pNext; + VkBuffer srcBuffer; + VkImage dstImage; + 
VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyBufferToImageInfo2; + +typedef struct VkCopyImageToBufferInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkBuffer dstBuffer; + uint32_t regionCount; + const VkBufferImageCopy2* pRegions; +} VkCopyImageToBufferInfo2; + +typedef struct VkImageBlit2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffsets[2]; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffsets[2]; +} VkImageBlit2; + +typedef struct VkBlitImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageBlit2* pRegions; + VkFilter filter; +} VkBlitImageInfo2; + +typedef struct VkImageResolve2 { + VkStructureType sType; + const void* pNext; + VkImageSubresourceLayers srcSubresource; + VkOffset3D srcOffset; + VkImageSubresourceLayers dstSubresource; + VkOffset3D dstOffset; + VkExtent3D extent; +} VkImageResolve2; + +typedef struct VkResolveImageInfo2 { + VkStructureType sType; + const void* pNext; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageResolve2* pRegions; +} VkResolveImageInfo2; + +typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures { + VkStructureType sType; + void* pNext; + VkBool32 subgroupSizeControl; + VkBool32 computeFullSubgroups; +} VkPhysicalDeviceSubgroupSizeControlFeatures; + +typedef struct VkPhysicalDeviceSubgroupSizeControlProperties { + VkStructureType sType; + void* pNext; + uint32_t minSubgroupSize; + uint32_t maxSubgroupSize; + uint32_t maxComputeWorkgroupSubgroups; + VkShaderStageFlags requiredSubgroupSizeStages; +} VkPhysicalDeviceSubgroupSizeControlProperties; + +typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo { + VkStructureType sType; + void* pNext; + uint32_t requiredSubgroupSize; +} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo; + +typedef struct VkPhysicalDeviceInlineUniformBlockFeatures { + VkStructureType sType; + void* pNext; + VkBool32 inlineUniformBlock; + VkBool32 descriptorBindingInlineUniformBlockUpdateAfterBind; +} VkPhysicalDeviceInlineUniformBlockFeatures; + +typedef struct VkPhysicalDeviceInlineUniformBlockProperties { + VkStructureType sType; + void* pNext; + uint32_t maxInlineUniformBlockSize; + uint32_t maxPerStageDescriptorInlineUniformBlocks; + uint32_t maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks; + uint32_t maxDescriptorSetInlineUniformBlocks; + uint32_t maxDescriptorSetUpdateAfterBindInlineUniformBlocks; +} VkPhysicalDeviceInlineUniformBlockProperties; + +typedef struct VkWriteDescriptorSetInlineUniformBlock { + VkStructureType sType; + const void* pNext; + uint32_t dataSize; + const void* pData; +} VkWriteDescriptorSetInlineUniformBlock; + +typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t maxInlineUniformBlockBindings; +} VkDescriptorPoolInlineUniformBlockCreateInfo; + +typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures { + VkStructureType sType; + void* pNext; + VkBool32 textureCompressionASTC_HDR; +} VkPhysicalDeviceTextureCompressionASTCHDRFeatures; + +typedef struct VkRenderingAttachmentInfo { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout 
imageLayout; + VkResolveModeFlagBits resolveMode; + VkImageView resolveImageView; + VkImageLayout resolveImageLayout; + VkAttachmentLoadOp loadOp; + VkAttachmentStoreOp storeOp; + VkClearValue clearValue; +} VkRenderingAttachmentInfo; + +typedef struct VkRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + VkRect2D renderArea; + uint32_t layerCount; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkRenderingAttachmentInfo* pColorAttachments; + const VkRenderingAttachmentInfo* pDepthAttachment; + const VkRenderingAttachmentInfo* pStencilAttachment; +} VkRenderingInfo; + +typedef struct VkPipelineRenderingCreateInfo { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkPipelineRenderingCreateInfo; + +typedef struct VkPhysicalDeviceDynamicRenderingFeatures { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRendering; +} VkPhysicalDeviceDynamicRenderingFeatures; + +typedef struct VkCommandBufferInheritanceRenderingInfo { + VkStructureType sType; + const void* pNext; + VkRenderingFlags flags; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; + VkSampleCountFlagBits rasterizationSamples; +} VkCommandBufferInheritanceRenderingInfo; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerDotProduct; +} VkPhysicalDeviceShaderIntegerDotProductFeatures; + +typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties { + VkStructureType sType; + void* pNext; + VkBool32 integerDotProduct8BitUnsignedAccelerated; + VkBool32 integerDotProduct8BitSignedAccelerated; + VkBool32 integerDotProduct8BitMixedSignednessAccelerated; + VkBool32 integerDotProduct4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedSignedAccelerated; + VkBool32 integerDotProduct4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProduct16BitUnsignedAccelerated; + VkBool32 integerDotProduct16BitSignedAccelerated; + VkBool32 integerDotProduct16BitMixedSignednessAccelerated; + VkBool32 integerDotProduct32BitUnsignedAccelerated; + VkBool32 integerDotProduct32BitSignedAccelerated; + VkBool32 integerDotProduct32BitMixedSignednessAccelerated; + VkBool32 integerDotProduct64BitUnsignedAccelerated; + VkBool32 integerDotProduct64BitSignedAccelerated; + VkBool32 integerDotProduct64BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating32BitSignedAccelerated; + VkBool32 
integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitUnsignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitSignedAccelerated; + VkBool32 integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated; +} VkPhysicalDeviceShaderIntegerDotProductProperties; + +typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties { + VkStructureType sType; + void* pNext; + VkDeviceSize storageTexelBufferOffsetAlignmentBytes; + VkBool32 storageTexelBufferOffsetSingleTexelAlignment; + VkDeviceSize uniformTexelBufferOffsetAlignmentBytes; + VkBool32 uniformTexelBufferOffsetSingleTexelAlignment; +} VkPhysicalDeviceTexelBufferAlignmentProperties; + +typedef struct VkFormatProperties3 { + VkStructureType sType; + void* pNext; + VkFormatFeatureFlags2 linearTilingFeatures; + VkFormatFeatureFlags2 optimalTilingFeatures; + VkFormatFeatureFlags2 bufferFeatures; +} VkFormatProperties3; + +typedef struct VkPhysicalDeviceMaintenance4Features { + VkStructureType sType; + void* pNext; + VkBool32 maintenance4; +} VkPhysicalDeviceMaintenance4Features; + +typedef struct VkPhysicalDeviceMaintenance4Properties { + VkStructureType sType; + void* pNext; + VkDeviceSize maxBufferSize; +} VkPhysicalDeviceMaintenance4Properties; + +typedef struct VkDeviceBufferMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkBufferCreateInfo* pCreateInfo; +} VkDeviceBufferMemoryRequirements; + +typedef struct VkDeviceImageMemoryRequirements { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + VkImageAspectFlagBits planeAspect; +} VkDeviceImageMemoryRequirements; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR 
*PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties( + VkPhysicalDevice 
physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateData( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const 
VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_surface 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) +#define VK_KHR_SURFACE_SPEC_VERSION 25 +#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" + +typedef enum VkPresentModeKHR { + VK_PRESENT_MODE_IMMEDIATE_KHR = 0, + VK_PRESENT_MODE_MAILBOX_KHR = 1, + VK_PRESENT_MODE_FIFO_KHR = 2, + VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, + VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, + VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, + VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPresentModeKHR; + +typedef enum VkColorSpaceKHR { + VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, + VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, + VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, + VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003, + VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, + VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, + VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, + VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, + VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, + // VK_COLOR_SPACE_DOLBYVISION_EXT is deprecated, but no reason was given in the API XML + VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, + VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, + VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, + VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, + VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, + VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, + VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000, + // VK_COLORSPACE_SRGB_NONLINEAR_KHR is a deprecated alias + VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, + // VK_COLOR_SPACE_DCI_P3_LINEAR_EXT is a deprecated alias + VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT, + VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkColorSpaceKHR; + +typedef enum VkSurfaceTransformFlagBitsKHR { + VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, + VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, + VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, + VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, + VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, + VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, + VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSurfaceTransformFlagBitsKHR; + +typedef enum VkCompositeAlphaFlagBitsKHR { + VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, + VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, + VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, + VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkCompositeAlphaFlagBitsKHR; +typedef VkFlags VkCompositeAlphaFlagsKHR; +typedef VkFlags VkSurfaceTransformFlagsKHR; +typedef struct VkSurfaceCapabilitiesKHR { + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; +} VkSurfaceCapabilitiesKHR; + +typedef struct VkSurfaceFormatKHR { + VkFormat format; + VkColorSpaceKHR colorSpace; +} VkSurfaceFormatKHR; + +typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance 
instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); +#endif + + +// VK_KHR_swapchain is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_swapchain 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) +#define VK_KHR_SWAPCHAIN_SPEC_VERSION 70 +#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" + +typedef enum VkSwapchainCreateFlagBitsKHR { + VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001, + VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002, + VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004, + VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008, + VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkSwapchainCreateFlagBitsKHR; +typedef VkFlags VkSwapchainCreateFlagsKHR; + +typedef enum VkDeviceGroupPresentModeFlagBitsKHR { + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001, + VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002, + VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004, + VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008, + VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDeviceGroupPresentModeFlagBitsKHR; +typedef VkFlags VkDeviceGroupPresentModeFlagsKHR; +typedef struct VkSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainCreateFlagsKHR flags; + VkSurfaceKHR surface; + uint32_t minImageCount; + VkFormat imageFormat; + VkColorSpaceKHR imageColorSpace; + VkExtent2D imageExtent; + uint32_t imageArrayLayers; + VkImageUsageFlags imageUsage; + VkSharingMode imageSharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; + VkSurfaceTransformFlagBitsKHR preTransform; + VkCompositeAlphaFlagBitsKHR compositeAlpha; + VkPresentModeKHR presentMode; + VkBool32 clipped; + VkSwapchainKHR oldSwapchain; +} VkSwapchainCreateInfoKHR; + +typedef struct VkPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreCount; + const VkSemaphore* pWaitSemaphores; + uint32_t swapchainCount; + const VkSwapchainKHR* pSwapchains; + const uint32_t* pImageIndices; + VkResult* pResults; +} VkPresentInfoKHR; + +typedef struct VkImageSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; +} VkImageSwapchainCreateInfoKHR; + +typedef struct VkBindImageMemorySwapchainInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndex; +} VkBindImageMemorySwapchainInfoKHR; + +typedef struct VkAcquireNextImageInfoKHR { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint64_t timeout; + VkSemaphore semaphore; + VkFence fence; + uint32_t deviceMask; +} VkAcquireNextImageInfoKHR; + +typedef struct VkDeviceGroupPresentCapabilitiesKHR { + VkStructureType sType; + void* pNext; + uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE]; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupPresentCapabilitiesKHR; + +typedef struct VkDeviceGroupPresentInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint32_t* pDeviceMasks; + VkDeviceGroupPresentModeFlagBitsKHR mode; +} VkDeviceGroupPresentInfoKHR; + +typedef struct VkDeviceGroupSwapchainCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceGroupPresentModeFlagsKHR modes; +} VkDeviceGroupSwapchainCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain); +typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); 
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex); +typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( + VkDevice device, + const VkSwapchainCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchain); + +VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( + VkDevice device, + VkSwapchainKHR swapchain, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t timeout, + VkSemaphore semaphore, + VkFence fence, + uint32_t* pImageIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( + VkQueue queue, + const VkPresentInfoKHR* pPresentInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR( + VkDevice device, + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR( + VkDevice device, + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR( + VkDevice device, + const VkAcquireNextImageInfoKHR* pAcquireInfo, + uint32_t* pImageIndex); +#endif + + +// VK_KHR_display is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_display 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) +#define VK_KHR_DISPLAY_SPEC_VERSION 23 +#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" +typedef VkFlags VkDisplayModeCreateFlagsKHR; + +typedef enum VkDisplayPlaneAlphaFlagBitsKHR { + VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, + VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, + VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, + VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkDisplayPlaneAlphaFlagBitsKHR; +typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; +typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; +typedef struct VkDisplayModeParametersKHR { + VkExtent2D visibleRegion; + uint32_t refreshRate; +} VkDisplayModeParametersKHR; + +typedef struct VkDisplayModeCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeCreateFlagsKHR flags; + VkDisplayModeParametersKHR parameters; +} VkDisplayModeCreateInfoKHR; + +typedef struct VkDisplayModePropertiesKHR { + VkDisplayModeKHR displayMode; + VkDisplayModeParametersKHR parameters; +} VkDisplayModePropertiesKHR; + +typedef struct VkDisplayPlaneCapabilitiesKHR { + VkDisplayPlaneAlphaFlagsKHR supportedAlpha; + VkOffset2D minSrcPosition; + VkOffset2D maxSrcPosition; + VkExtent2D minSrcExtent; + VkExtent2D maxSrcExtent; + VkOffset2D minDstPosition; + VkOffset2D maxDstPosition; + VkExtent2D minDstExtent; + VkExtent2D maxDstExtent; +} VkDisplayPlaneCapabilitiesKHR; + +typedef struct VkDisplayPlanePropertiesKHR { + VkDisplayKHR currentDisplay; + uint32_t currentStackIndex; +} VkDisplayPlanePropertiesKHR; + +typedef struct VkDisplayPropertiesKHR { + VkDisplayKHR display; + const char* displayName; + VkExtent2D physicalDimensions; + VkExtent2D physicalResolution; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkBool32 planeReorderPossible; + VkBool32 persistentContent; +} VkDisplayPropertiesKHR; + +typedef struct VkDisplaySurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDisplaySurfaceCreateFlagsKHR flags; + VkDisplayModeKHR displayMode; + uint32_t planeIndex; + uint32_t planeStackIndex; + VkSurfaceTransformFlagBitsKHR transform; + float globalAlpha; + VkDisplayPlaneAlphaFlagBitsKHR alphaMode; + VkExtent2D imageExtent; +} VkDisplaySurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities); 
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlanePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( + VkPhysicalDevice physicalDevice, + uint32_t planeIndex, + uint32_t* pDisplayCount, + VkDisplayKHR* pDisplays); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + const VkDisplayModeCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDisplayModeKHR* pMode); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + VkDisplayModeKHR mode, + uint32_t planeIndex, + VkDisplayPlaneCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( + VkInstance instance, + const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_KHR_display_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_display_swapchain 1 +#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10 +#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" +typedef struct VkDisplayPresentInfoKHR { + VkStructureType sType; + const void* pNext; + VkRect2D srcRect; + VkRect2D dstRect; + VkBool32 persistent; +} VkDisplayPresentInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkSwapchainKHR* pSwapchains); +#endif + + +// VK_KHR_sampler_mirror_clamp_to_edge is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_sampler_mirror_clamp_to_edge 1 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3 +#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" + + +// VK_KHR_video_queue is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_video_queue 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR) +#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue" + +typedef enum VkQueryResultStatusKHR { + VK_QUERY_RESULT_STATUS_ERROR_KHR = -1, + VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0, + VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1, + VK_QUERY_RESULT_STATUS_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_KHR = -1000299000, + VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueryResultStatusKHR; + +typedef enum VkVideoCodecOperationFlagBitsKHR { + VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0, + VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR = 0x00010000, + VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR = 0x00020000, + VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001, + VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002, + VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR = 0x00000004, + VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodecOperationFlagBitsKHR; +typedef VkFlags VkVideoCodecOperationFlagsKHR; + +typedef enum VkVideoChromaSubsamplingFlagBitsKHR { + VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0, + VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001, + VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002, + VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004, + VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008, + VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoChromaSubsamplingFlagBitsKHR; +typedef VkFlags VkVideoChromaSubsamplingFlagsKHR; + +typedef enum VkVideoComponentBitDepthFlagBitsKHR { + VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0, + VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001, + VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004, + VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010, + VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoComponentBitDepthFlagBitsKHR; +typedef VkFlags VkVideoComponentBitDepthFlagsKHR; + +typedef enum VkVideoCapabilityFlagBitsKHR { + VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002, + VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCapabilityFlagBitsKHR; +typedef VkFlags VkVideoCapabilityFlagsKHR; + +typedef enum VkVideoSessionCreateFlagBitsKHR { + VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001, + VK_VIDEO_SESSION_CREATE_ALLOW_ENCODE_PARAMETER_OPTIMIZATIONS_BIT_KHR = 0x00000002, + VK_VIDEO_SESSION_CREATE_INLINE_QUERIES_BIT_KHR = 0x00000004, + VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoSessionCreateFlagBitsKHR; +typedef VkFlags VkVideoSessionCreateFlagsKHR; +typedef VkFlags VkVideoSessionParametersCreateFlagsKHR; +typedef VkFlags VkVideoBeginCodingFlagsKHR; +typedef VkFlags VkVideoEndCodingFlagsKHR; + +typedef enum VkVideoCodingControlFlagBitsKHR { + VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001, + VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002, + VK_VIDEO_CODING_CONTROL_ENCODE_QUALITY_LEVEL_BIT_KHR = 0x00000004, + VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoCodingControlFlagBitsKHR; +typedef VkFlags VkVideoCodingControlFlagsKHR; +typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 queryResultStatusSupport; +} VkQueueFamilyQueryResultStatusPropertiesKHR; + +typedef struct VkQueueFamilyVideoPropertiesKHR { + VkStructureType 
sType; + void* pNext; + VkVideoCodecOperationFlagsKHR videoCodecOperations; +} VkQueueFamilyVideoPropertiesKHR; + +typedef struct VkVideoProfileInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodecOperationFlagBitsKHR videoCodecOperation; + VkVideoChromaSubsamplingFlagsKHR chromaSubsampling; + VkVideoComponentBitDepthFlagsKHR lumaBitDepth; + VkVideoComponentBitDepthFlagsKHR chromaBitDepth; +} VkVideoProfileInfoKHR; + +typedef struct VkVideoProfileListInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t profileCount; + const VkVideoProfileInfoKHR* pProfiles; +} VkVideoProfileListInfoKHR; + +typedef struct VkVideoCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoCapabilityFlagsKHR flags; + VkDeviceSize minBitstreamBufferOffsetAlignment; + VkDeviceSize minBitstreamBufferSizeAlignment; + VkExtent2D pictureAccessGranularity; + VkExtent2D minCodedExtent; + VkExtent2D maxCodedExtent; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + VkExtensionProperties stdHeaderVersion; +} VkVideoCapabilitiesKHR; + +typedef struct VkPhysicalDeviceVideoFormatInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageUsageFlags imageUsage; +} VkPhysicalDeviceVideoFormatInfoKHR; + +typedef struct VkVideoFormatPropertiesKHR { + VkStructureType sType; + void* pNext; + VkFormat format; + VkComponentMapping componentMapping; + VkImageCreateFlags imageCreateFlags; + VkImageType imageType; + VkImageTiling imageTiling; + VkImageUsageFlags imageUsageFlags; +} VkVideoFormatPropertiesKHR; + +typedef struct VkVideoPictureResourceInfoKHR { + VkStructureType sType; + const void* pNext; + VkOffset2D codedOffset; + VkExtent2D codedExtent; + uint32_t baseArrayLayer; + VkImageView imageViewBinding; +} VkVideoPictureResourceInfoKHR; + +typedef struct VkVideoReferenceSlotInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t slotIndex; + const VkVideoPictureResourceInfoKHR* pPictureResource; +} VkVideoReferenceSlotInfoKHR; + +typedef struct VkVideoSessionMemoryRequirementsKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryBindIndex; + VkMemoryRequirements memoryRequirements; +} VkVideoSessionMemoryRequirementsKHR; + +typedef struct VkBindVideoSessionMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t memoryBindIndex; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + VkDeviceSize memorySize; +} VkBindVideoSessionMemoryInfoKHR; + +typedef struct VkVideoSessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + VkVideoSessionCreateFlagsKHR flags; + const VkVideoProfileInfoKHR* pVideoProfile; + VkFormat pictureFormat; + VkExtent2D maxCodedExtent; + VkFormat referencePictureFormat; + uint32_t maxDpbSlots; + uint32_t maxActiveReferencePictures; + const VkExtensionProperties* pStdHeaderVersion; +} VkVideoSessionCreateInfoKHR; + +typedef struct VkVideoSessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersCreateFlagsKHR flags; + VkVideoSessionParametersKHR videoSessionParametersTemplate; + VkVideoSessionKHR videoSession; +} VkVideoSessionParametersCreateInfoKHR; + +typedef struct VkVideoSessionParametersUpdateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t updateSequenceCount; +} VkVideoSessionParametersUpdateInfoKHR; + +typedef struct VkVideoBeginCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoBeginCodingFlagsKHR flags; + VkVideoSessionKHR videoSession; + VkVideoSessionParametersKHR 
videoSessionParameters; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoBeginCodingInfoKHR; + +typedef struct VkVideoEndCodingInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEndCodingFlagsKHR flags; +} VkVideoEndCodingInfoKHR; + +typedef struct VkVideoCodingControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoCodingControlFlagsKHR flags; +} VkVideoCodingControlInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters); +typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR( + VkPhysicalDevice physicalDevice, + const VkVideoProfileInfoKHR* pVideoProfile, + VkVideoCapabilitiesKHR* pCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, + uint32_t* pVideoFormatPropertyCount, + VkVideoFormatPropertiesKHR* pVideoFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR( + VkDevice device, + const VkVideoSessionCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionKHR* pVideoSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + const 
VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t* pMemoryRequirementsCount, + VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR( + VkDevice device, + VkVideoSessionKHR videoSession, + uint32_t bindSessionMemoryInfoCount, + const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR( + VkDevice device, + const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkVideoSessionParametersKHR* pVideoSessionParameters); + +VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR( + VkDevice device, + VkVideoSessionParametersKHR videoSessionParameters, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoBeginCodingInfoKHR* pBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoEndCodingInfoKHR* pEndCodingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR( + VkCommandBuffer commandBuffer, + const VkVideoCodingControlInfoKHR* pCodingControlInfo); +#endif + + +// VK_KHR_video_decode_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_queue 1 +#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue" + +typedef enum VkVideoDecodeCapabilityFlagBitsKHR { + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoDecodeCapabilityFlagsKHR; + +typedef enum VkVideoDecodeUsageFlagBitsKHR { + VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004, + VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeUsageFlagBitsKHR; +typedef VkFlags VkVideoDecodeUsageFlagsKHR; +typedef VkFlags VkVideoDecodeFlagsKHR; +typedef struct VkVideoDecodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoDecodeCapabilityFlagsKHR flags; +} VkVideoDecodeCapabilitiesKHR; + +typedef struct VkVideoDecodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeUsageFlagsKHR videoUsageHints; +} VkVideoDecodeUsageInfoKHR; + +typedef struct VkVideoDecodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoDecodeFlagsKHR flags; + VkBuffer srcBuffer; + VkDeviceSize srcBufferOffset; + VkDeviceSize srcBufferRange; + VkVideoPictureResourceInfoKHR dstPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; +} VkVideoDecodeInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR( + 
VkCommandBuffer commandBuffer, + const VkVideoDecodeInfoKHR* pDecodeInfo); +#endif + + +// VK_KHR_video_encode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_h264 1 +#include "vk_video/vulkan_video_codec_h264std.h" +#include "vk_video/vulkan_video_codec_h264std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H264_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_KHR_video_encode_h264" + +typedef enum VkVideoEncodeH264CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_CAPABILITY_PER_SLICE_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_CAPABILITY_GENERATE_PREFIX_NALU_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH264StdFlagBitsKHR { + VK_VIDEO_ENCODE_H264_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_STD_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_STD_SCALING_MATRIX_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_STD_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_STD_SECOND_CHROMA_QP_INDEX_OFFSET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H264_STD_PIC_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_EXPLICIT_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H264_STD_WEIGHTED_BIPRED_IDC_IMPLICIT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H264_STD_TRANSFORM_8X8_MODE_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H264_STD_DIRECT_SPATIAL_MV_PRED_FLAG_UNSET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_UNSET_BIT_KHR = 0x00000800, + VK_VIDEO_ENCODE_H264_STD_ENTROPY_CODING_MODE_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H264_STD_DIRECT_8X8_INFERENCE_FLAG_UNSET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H264_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_DISABLED_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_ENABLED_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H264_STD_DEBLOCKING_FILTER_PARTIAL_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H264_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H264_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H264_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264StdFlagsKHR; + +typedef enum VkVideoEncodeH264RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H264_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_TEMPORAL_LAYER_PATTERN_DYADIC_BIT_KHR = 
0x00000010, + VK_VIDEO_ENCODE_H264_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH264RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH264RateControlFlagsKHR; +typedef struct VkVideoEncodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264CapabilityFlagsKHR flags; + StdVideoH264LevelIdc maxLevelIdc; + uint32_t maxSliceCount; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxTemporalLayerCount; + VkBool32 expectDyadicTemporalLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH264StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH264CapabilitiesKHR; + +typedef struct VkVideoEncodeH264QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH264QpKHR; + +typedef struct VkVideoEncodeH264QualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH264RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredTemporalLayerCount; + VkVideoEncodeH264QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; + VkBool32 preferredStdEntropyCodingModeFlag; +} VkVideoEncodeH264QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH264SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH264LevelIdc maxLevelIdc; +} VkVideoEncodeH264SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoEncodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH264SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH264SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH264SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH264NaluSliceInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH264SliceHeader* pStdSliceHeader; +} VkVideoEncodeH264NaluSliceInfoKHR; + +typedef struct VkVideoEncodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceEntryCount; + const VkVideoEncodeH264NaluSliceInfoKHR* pNaluSliceEntries; + const StdVideoEncodeH264PictureInfo* pStdPictureInfo; + VkBool32 generatePrefixNalu; +} VkVideoEncodeH264PictureInfoKHR; + +typedef struct VkVideoEncodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH264DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + 
StdVideoH264ProfileIdc stdProfileIdc; +} VkVideoEncodeH264ProfileInfoKHR; + +typedef struct VkVideoEncodeH264RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH264RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t temporalLayerCount; +} VkVideoEncodeH264RateControlInfoKHR; + +typedef struct VkVideoEncodeH264FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH264FrameSizeKHR; + +typedef struct VkVideoEncodeH264RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH264QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH264QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH264FrameSizeKHR maxFrameSize; +} VkVideoEncodeH264RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH264GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH264GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_encode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_h265 1 +#include "vk_video/vulkan_video_codec_h265std.h" +#include "vk_video/vulkan_video_codec_h265std_encode.h" +#define VK_KHR_VIDEO_ENCODE_H265_SPEC_VERSION 14 +#define VK_KHR_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_KHR_video_encode_h265" + +typedef enum VkVideoEncodeH265CapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CAPABILITY_PREDICTION_WEIGHT_TABLE_GENERATED_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_SEGMENT_TYPE_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L0_LIST_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_PICTURE_TYPE_MIN_MAX_QP_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_CAPABILITY_PER_SLICE_SEGMENT_CONSTANT_QP_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILES_PER_SLICE_SEGMENT_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_SEGMENTS_PER_TILE_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CapabilityFlagsKHR; + +typedef enum VkVideoEncodeH265StdFlagBitsKHR { + VK_VIDEO_ENCODE_H265_STD_SEPARATE_COLOR_PLANE_FLAG_SET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_STD_SAMPLE_ADAPTIVE_OFFSET_ENABLED_FLAG_SET_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_STD_SCALING_LIST_DATA_PRESENT_FLAG_SET_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_STD_PCM_ENABLED_FLAG_SET_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_STD_SPS_TEMPORAL_MVP_ENABLED_FLAG_SET_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_STD_INIT_QP_MINUS26_BIT_KHR = 0x00000020, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_PRED_FLAG_SET_BIT_KHR = 0x00000040, + VK_VIDEO_ENCODE_H265_STD_WEIGHTED_BIPRED_FLAG_SET_BIT_KHR = 0x00000080, + VK_VIDEO_ENCODE_H265_STD_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_KHR = 0x00000100, + VK_VIDEO_ENCODE_H265_STD_SIGN_DATA_HIDING_ENABLED_FLAG_SET_BIT_KHR = 0x00000200, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_SET_BIT_KHR = 0x00000400, + VK_VIDEO_ENCODE_H265_STD_TRANSFORM_SKIP_ENABLED_FLAG_UNSET_BIT_KHR = 0x00000800, + 
VK_VIDEO_ENCODE_H265_STD_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_FLAG_SET_BIT_KHR = 0x00001000, + VK_VIDEO_ENCODE_H265_STD_TRANSQUANT_BYPASS_ENABLED_FLAG_SET_BIT_KHR = 0x00002000, + VK_VIDEO_ENCODE_H265_STD_CONSTRAINED_INTRA_PRED_FLAG_SET_BIT_KHR = 0x00004000, + VK_VIDEO_ENCODE_H265_STD_ENTROPY_CODING_SYNC_ENABLED_FLAG_SET_BIT_KHR = 0x00008000, + VK_VIDEO_ENCODE_H265_STD_DEBLOCKING_FILTER_OVERRIDE_ENABLED_FLAG_SET_BIT_KHR = 0x00010000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENTS_ENABLED_FLAG_SET_BIT_KHR = 0x00020000, + VK_VIDEO_ENCODE_H265_STD_DEPENDENT_SLICE_SEGMENT_FLAG_SET_BIT_KHR = 0x00040000, + VK_VIDEO_ENCODE_H265_STD_SLICE_QP_DELTA_BIT_KHR = 0x00080000, + VK_VIDEO_ENCODE_H265_STD_DIFFERENT_SLICE_QP_DELTA_BIT_KHR = 0x00100000, + VK_VIDEO_ENCODE_H265_STD_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265StdFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265StdFlagsKHR; + +typedef enum VkVideoEncodeH265CtbSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265CtbSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265CtbSizeFlagsKHR; + +typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsKHR { + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265TransformBlockSizeFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsKHR; + +typedef enum VkVideoEncodeH265RateControlFlagBitsKHR { + VK_VIDEO_ENCODE_H265_RATE_CONTROL_ATTEMPT_HRD_COMPLIANCE_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REGULAR_GOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_FLAT_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_REFERENCE_PATTERN_DYADIC_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_TEMPORAL_SUB_LAYER_PATTERN_DYADIC_BIT_KHR = 0x00000010, + VK_VIDEO_ENCODE_H265_RATE_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeH265RateControlFlagBitsKHR; +typedef VkFlags VkVideoEncodeH265RateControlFlagsKHR; +typedef struct VkVideoEncodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeH265CapabilityFlagsKHR flags; + StdVideoH265LevelIdc maxLevelIdc; + uint32_t maxSliceSegmentCount; + VkExtent2D maxTiles; + VkVideoEncodeH265CtbSizeFlagsKHR ctbSizes; + VkVideoEncodeH265TransformBlockSizeFlagsKHR transformBlockSizes; + uint32_t maxPPictureL0ReferenceCount; + uint32_t maxBPictureL0ReferenceCount; + uint32_t maxL1ReferenceCount; + uint32_t maxSubLayerCount; + VkBool32 expectDyadicTemporalSubLayerPattern; + int32_t minQp; + int32_t maxQp; + VkBool32 prefersGopRemainingFrames; + VkBool32 requiresGopRemainingFrames; + VkVideoEncodeH265StdFlagsKHR stdSyntaxFlags; +} VkVideoEncodeH265CapabilitiesKHR; + +typedef struct VkVideoEncodeH265SessionCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMaxLevelIdc; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoEncodeH265SessionCreateInfoKHR; + +typedef struct VkVideoEncodeH265QpKHR { + int32_t qpI; + int32_t qpP; + int32_t qpB; +} VkVideoEncodeH265QpKHR; + +typedef struct VkVideoEncodeH265QualityLevelPropertiesKHR 
{ + VkStructureType sType; + void* pNext; + VkVideoEncodeH265RateControlFlagsKHR preferredRateControlFlags; + uint32_t preferredGopFrameCount; + uint32_t preferredIdrPeriod; + uint32_t preferredConsecutiveBFrameCount; + uint32_t preferredSubLayerCount; + VkVideoEncodeH265QpKHR preferredConstantQp; + uint32_t preferredMaxL0ReferenceCount; + uint32_t preferredMaxL1ReferenceCount; +} VkVideoEncodeH265QualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoEncodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoEncodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoEncodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 writeStdVPS; + VkBool32 writeStdSPS; + VkBool32 writeStdPPS; + uint32_t stdVPSId; + uint32_t stdSPSId; + uint32_t stdPPSId; +} VkVideoEncodeH265SessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeH265SessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasStdVPSOverrides; + VkBool32 hasStdSPSOverrides; + VkBool32 hasStdPPSOverrides; +} VkVideoEncodeH265SessionParametersFeedbackInfoKHR; + +typedef struct VkVideoEncodeH265NaluSliceSegmentInfoKHR { + VkStructureType sType; + const void* pNext; + int32_t constantQp; + const StdVideoEncodeH265SliceSegmentHeader* pStdSliceSegmentHeader; +} VkVideoEncodeH265NaluSliceSegmentInfoKHR; + +typedef struct VkVideoEncodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t naluSliceSegmentEntryCount; + const VkVideoEncodeH265NaluSliceSegmentInfoKHR* pNaluSliceSegmentEntries; + const StdVideoEncodeH265PictureInfo* pStdPictureInfo; +} VkVideoEncodeH265PictureInfoKHR; + +typedef struct VkVideoEncodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoEncodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoEncodeH265DpbSlotInfoKHR; + +typedef struct VkVideoEncodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoEncodeH265ProfileInfoKHR; + +typedef struct VkVideoEncodeH265RateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeH265RateControlFlagsKHR flags; + uint32_t gopFrameCount; + uint32_t idrPeriod; + uint32_t consecutiveBFrameCount; + uint32_t subLayerCount; +} VkVideoEncodeH265RateControlInfoKHR; + +typedef struct VkVideoEncodeH265FrameSizeKHR { + uint32_t frameISize; + uint32_t framePSize; + uint32_t frameBSize; +} VkVideoEncodeH265FrameSizeKHR; + +typedef struct VkVideoEncodeH265RateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useMinQp; + VkVideoEncodeH265QpKHR minQp; + VkBool32 useMaxQp; + VkVideoEncodeH265QpKHR maxQp; + VkBool32 useMaxFrameSize; + VkVideoEncodeH265FrameSizeKHR maxFrameSize; +} VkVideoEncodeH265RateControlLayerInfoKHR; + +typedef struct VkVideoEncodeH265GopRemainingFrameInfoKHR { + VkStructureType sType; + const void* pNext; + VkBool32 useGopRemainingFrames; + uint32_t gopRemainingI; + 
uint32_t gopRemainingP; + uint32_t gopRemainingB; +} VkVideoEncodeH265GopRemainingFrameInfoKHR; + + + +// VK_KHR_video_decode_h264 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h264 1 +#include "vk_video/vulkan_video_codec_h264std_decode.h" +#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 9 +#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264" + +typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR { + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002, + VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoDecodeH264PictureLayoutFlagBitsKHR; +typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR; +typedef struct VkVideoDecodeH264ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH264ProfileIdc stdProfileIdc; + VkVideoDecodeH264PictureLayoutFlagBitsKHR pictureLayout; +} VkVideoDecodeH264ProfileInfoKHR; + +typedef struct VkVideoDecodeH264CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH264LevelIdc maxLevelIdc; + VkOffset2D fieldOffsetGranularity; +} VkVideoDecodeH264CapabilitiesKHR; + +typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdSPSCount; + const StdVideoH264SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH264PictureParameterSet* pStdPPSs; +} VkVideoDecodeH264SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH264SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH264SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH264PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264PictureInfo* pStdPictureInfo; + uint32_t sliceCount; + const uint32_t* pSliceOffsets; +} VkVideoDecodeH264PictureInfoKHR; + +typedef struct VkVideoDecodeH264DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH264ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH264DpbSlotInfoKHR; + + + +// VK_KHR_dynamic_rendering is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_dynamic_rendering 1 +#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering" +typedef VkRenderingFlags VkRenderingFlagsKHR; + +typedef VkRenderingFlagBits VkRenderingFlagBitsKHR; + +typedef VkRenderingInfo VkRenderingInfoKHR; + +typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR; + +typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR; + +typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR; + +typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR; + +typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; + VkExtent2D shadingRateAttachmentTexelSize; +} VkRenderingFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkImageLayout imageLayout; +} VkRenderingFragmentDensityMapAttachmentInfoEXT; + +typedef struct VkAttachmentSampleCountInfoAMD { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const VkSampleCountFlagBits* pColorAttachmentSamples; + VkSampleCountFlagBits depthStencilAttachmentSamples; +} VkAttachmentSampleCountInfoAMD; + +typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV; + +typedef struct VkMultiviewPerViewAttributesInfoNVX { + VkStructureType sType; + const void* pNext; + VkBool32 perViewAttributes; + VkBool32 perViewAttributesPositionXOnly; +} VkMultiviewPerViewAttributesInfoNVX; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); +#endif + + +// VK_KHR_multiview is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_multiview 1 +#define VK_KHR_MULTIVIEW_SPEC_VERSION 1 +#define VK_KHR_MULTIVIEW_EXTENSION_NAME "VK_KHR_multiview" +typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR; + +typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR; + +typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR; + + + +// VK_KHR_get_physical_device_properties2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_get_physical_device_properties2 1 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2 +#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" +typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR; + +typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR; + +typedef VkFormatProperties2 VkFormatProperties2KHR; + +typedef VkImageFormatProperties2 VkImageFormatProperties2KHR; + +typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR; + +typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR; + +typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR; + +typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR; + +typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceFeatures2* pFeatures); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceProperties2* pProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkFormatProperties2* pFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( + VkPhysicalDevice physicalDevice, + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); +#endif + + +// VK_KHR_device_group is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_device_group 1 +#define VK_KHR_DEVICE_GROUP_SPEC_VERSION 4 +#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group" +typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR; + +typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR; + +typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR; + +typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR; + +typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR; + +typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR; + +typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR; + +typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR; + +typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR; + +typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR; + +typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); +typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask); +typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR( + VkDevice device, + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR( + VkCommandBuffer commandBuffer, + uint32_t deviceMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR( + VkCommandBuffer commandBuffer, + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); +#endif + + +// VK_KHR_shader_draw_parameters is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_draw_parameters 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 +#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" + + +// VK_KHR_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance1 1 +#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1" +// VK_KHR_MAINTENANCE1_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE1_SPEC_VERSION VK_KHR_MAINTENANCE_1_SPEC_VERSION +// VK_KHR_MAINTENANCE1_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME +typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR; + +typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( + VkDevice device, + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); +#endif + + +// VK_KHR_device_group_creation is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_device_group_creation 1 +#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1 +#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation" +#define VK_MAX_DEVICE_GROUP_SIZE_KHR VK_MAX_DEVICE_GROUP_SIZE +typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR; + +typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +#endif + + +// VK_KHR_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory_capabilities 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" +#define VK_LUID_SIZE_KHR VK_LUID_SIZE +typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR; + +typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR; + +typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR; + +typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR; + +typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR; + +typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR; + +typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR; + +typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR; + +typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR; + +typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); +#endif + + +// VK_KHR_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_memory 1 +#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" +#define VK_QUEUE_FAMILY_EXTERNAL_KHR VK_QUEUE_FAMILY_EXTERNAL +typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR; + +typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR; + +typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR; + + + +// VK_KHR_external_memory_fd is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_memory_fd 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" +typedef struct VkImportMemoryFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + int fd; +} VkImportMemoryFdInfoKHR; + +typedef struct VkMemoryFdPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryFdPropertiesKHR; + +typedef struct VkMemoryGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + int fd, + VkMemoryFdPropertiesKHR* pMemoryFdProperties); +#endif + + +// VK_KHR_external_semaphore_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore_capabilities 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" +typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR; + +typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR; + +typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR; + +typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR; + +typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); +#endif + + +// VK_KHR_external_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_semaphore 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" +typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR; + +typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR; + +typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR; + + + +// VK_KHR_external_semaphore_fd is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_semaphore_fd 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" +typedef struct VkImportSemaphoreFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + int fd; +} VkImportSemaphoreFdInfoKHR; + +typedef struct VkSemaphoreGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( + VkDevice device, + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( + VkDevice device, + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_push_descriptor is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_push_descriptor 1 +#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2 +#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" +typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxPushDescriptors; +} VkPhysicalDevicePushDescriptorPropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set, + uint32_t descriptorWriteCount, + const VkWriteDescriptorSet* pDescriptorWrites); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( + VkCommandBuffer commandBuffer, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkPipelineLayout layout, + uint32_t set, + const void* pData); +#endif + + +// VK_KHR_shader_float16_int8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float16_int8 1 +#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8" +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; + +typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR; + + + +// VK_KHR_16bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_16bit_storage 1 +#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" +typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR; + + + +// VK_KHR_incremental_present is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_incremental_present 1 +#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2 +#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" +typedef struct VkRectLayerKHR { + VkOffset2D offset; + VkExtent2D extent; + uint32_t layer; +} VkRectLayerKHR; + +typedef struct VkPresentRegionKHR { + uint32_t rectangleCount; + const VkRectLayerKHR* pRectangles; +} VkPresentRegionKHR; + +typedef struct VkPresentRegionsKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentRegionKHR* pRegions; +} VkPresentRegionsKHR; + + + +// VK_KHR_descriptor_update_template is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_descriptor_update_template 1 +typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR; + +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 +#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" +typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR; + +typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR; + +typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR; + +typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); +typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( + VkDevice device, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( + VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); +#endif + + +// VK_KHR_imageless_framebuffer is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_imageless_framebuffer 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1 +#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer" +typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR; + +typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR; + +typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR; + +typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR; + + + +// VK_KHR_create_renderpass2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_create_renderpass2 1 +#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1 +#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2" +typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR; + +typedef VkAttachmentDescription2 VkAttachmentDescription2KHR; + +typedef VkAttachmentReference2 VkAttachmentReference2KHR; + +typedef VkSubpassDescription2 VkSubpassDescription2KHR; + +typedef VkSubpassDependency2 VkSubpassDependency2KHR; + +typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR; + +typedef VkSubpassEndInfo VkSubpassEndInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); +typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); +typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR( + VkDevice device, + const VkRenderPassCreateInfo2* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkRenderPass* pRenderPass); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); +#endif + + +// VK_KHR_shared_presentable_image is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shared_presentable_image 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 +#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" +typedef struct VkSharedPresentSurfaceCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkImageUsageFlags sharedPresentSupportedUsageFlags; +} VkSharedPresentSurfaceCapabilitiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( + VkDevice device, + VkSwapchainKHR swapchain); +#endif + + +// VK_KHR_external_fence_capabilities is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_fence_capabilities 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" +typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR; + +typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR; + +typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR; + +typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR; + +typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR; + +typedef VkExternalFenceProperties VkExternalFencePropertiesKHR; + +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); +#endif + + +// VK_KHR_external_fence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence 1 +#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" +typedef VkFenceImportFlags VkFenceImportFlagsKHR; + +typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR; + +typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR; + + + +// VK_KHR_external_fence_fd is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence_fd 1 +#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" +typedef struct VkImportFenceFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + int fd; +} VkImportFenceFdInfoKHR; + +typedef struct VkFenceGetFdInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetFdInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( + VkDevice device, + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( + VkDevice device, + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); +#endif + + +// VK_KHR_performance_query is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_performance_query 1 +#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1 +#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query" + +typedef enum VkPerformanceCounterUnitKHR { + VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0, + VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1, + VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3, + VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4, + VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5, + VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6, + VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7, + VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8, + VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9, + VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10, + VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterUnitKHR; + +typedef enum VkPerformanceCounterScopeKHR { + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0, + VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1, + VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2, + // VK_QUERY_SCOPE_COMMAND_BUFFER_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR, + // VK_QUERY_SCOPE_RENDER_PASS_KHR is a deprecated alias + VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR, + // VK_QUERY_SCOPE_COMMAND_KHR is a deprecated alias + VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR, + VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterScopeKHR; + +typedef enum VkPerformanceCounterStorageKHR { + VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0, + VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1, + VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2, + VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4, + VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5, + VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterStorageKHR; + +typedef enum VkPerformanceCounterDescriptionFlagBitsKHR { + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001, + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR, + // VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR is a deprecated alias + VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR, + VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPerformanceCounterDescriptionFlagBitsKHR; +typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR; + +typedef enum VkAcquireProfilingLockFlagBitsKHR { + VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAcquireProfilingLockFlagBitsKHR; +typedef VkFlags VkAcquireProfilingLockFlagsKHR; +typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 performanceCounterQueryPools; + VkBool32 performanceCounterMultipleQueryPools; +} VkPhysicalDevicePerformanceQueryFeaturesKHR; + +typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 allowCommandBufferQueryCopies; +} VkPhysicalDevicePerformanceQueryPropertiesKHR; + +typedef struct VkPerformanceCounterKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterUnitKHR 
unit; + VkPerformanceCounterScopeKHR scope; + VkPerformanceCounterStorageKHR storage; + uint8_t uuid[VK_UUID_SIZE]; +} VkPerformanceCounterKHR; + +typedef struct VkPerformanceCounterDescriptionKHR { + VkStructureType sType; + void* pNext; + VkPerformanceCounterDescriptionFlagsKHR flags; + char name[VK_MAX_DESCRIPTION_SIZE]; + char category[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; +} VkPerformanceCounterDescriptionKHR; + +typedef struct VkQueryPoolPerformanceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t queueFamilyIndex; + uint32_t counterIndexCount; + const uint32_t* pCounterIndices; +} VkQueryPoolPerformanceCreateInfoKHR; + +typedef union VkPerformanceCounterResultKHR { + int32_t int32; + int64_t int64; + uint32_t uint32; + uint64_t uint64; + float float32; + double float64; +} VkPerformanceCounterResultKHR; + +typedef struct VkAcquireProfilingLockInfoKHR { + VkStructureType sType; + const void* pNext; + VkAcquireProfilingLockFlagsKHR flags; + uint64_t timeout; +} VkAcquireProfilingLockInfoKHR; + +typedef struct VkPerformanceQuerySubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t counterPassIndex; +} VkPerformanceQuerySubmitInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + uint32_t* pCounterCount, + VkPerformanceCounterKHR* pCounters, + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( + VkPhysicalDevice physicalDevice, + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR( + VkDevice device, + const VkAcquireProfilingLockInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR( + VkDevice device); +#endif + + +// VK_KHR_maintenance2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance2 1 +#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2" +// VK_KHR_MAINTENANCE2_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE2_SPEC_VERSION VK_KHR_MAINTENANCE_2_SPEC_VERSION +// VK_KHR_MAINTENANCE2_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME +typedef VkPointClippingBehavior VkPointClippingBehaviorKHR; + +typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR; + +typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR; + +typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR; + +typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR; + +typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR; + +typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR; + + + +// VK_KHR_get_surface_capabilities2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_surface_capabilities2 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" +typedef struct VkPhysicalDeviceSurfaceInfo2KHR { + VkStructureType sType; + const void* pNext; + VkSurfaceKHR surface; +} VkPhysicalDeviceSurfaceInfo2KHR; + +typedef struct VkSurfaceCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceCapabilitiesKHR surfaceCapabilities; +} VkSurfaceCapabilities2KHR; + +typedef struct VkSurfaceFormat2KHR { + VkStructureType sType; + void* pNext; + VkSurfaceFormatKHR surfaceFormat; +} VkSurfaceFormat2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* pSurfaceFormatCount, + VkSurfaceFormat2KHR* pSurfaceFormats); +#endif + + +// VK_KHR_variable_pointers is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_variable_pointers 1 +#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 +#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR; + +typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR; + + + +// VK_KHR_get_display_properties2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_get_display_properties2 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1 +#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2" +typedef struct VkDisplayProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPropertiesKHR displayProperties; +} VkDisplayProperties2KHR; + +typedef struct VkDisplayPlaneProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlanePropertiesKHR displayPlaneProperties; +} VkDisplayPlaneProperties2KHR; + +typedef struct VkDisplayModeProperties2KHR { + VkStructureType sType; + void* pNext; + VkDisplayModePropertiesKHR displayModeProperties; +} VkDisplayModeProperties2KHR; + +typedef struct VkDisplayPlaneInfo2KHR { + VkStructureType sType; + const void* pNext; + VkDisplayModeKHR mode; + uint32_t planeIndex; +} VkDisplayPlaneInfo2KHR; + +typedef struct VkDisplayPlaneCapabilities2KHR { + VkStructureType sType; + void* pNext; + VkDisplayPlaneCapabilitiesKHR capabilities; +} VkDisplayPlaneCapabilities2KHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkDisplayPlaneProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display, + uint32_t* pPropertyCount, + VkDisplayModeProperties2KHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR( + VkPhysicalDevice physicalDevice, + const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, + VkDisplayPlaneCapabilities2KHR* pCapabilities); +#endif + + +// VK_KHR_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dedicated_allocation 1 +#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 +#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" +typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR; + +typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR; + + + +// VK_KHR_storage_buffer_storage_class is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_storage_buffer_storage_class 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 +#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" + + +// VK_KHR_relaxed_block_layout is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_relaxed_block_layout 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" + + +// VK_KHR_get_memory_requirements2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_get_memory_requirements2 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 +#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" +typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR; + +typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR; + +typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR; + +typedef VkMemoryRequirements2 VkMemoryRequirements2KHR; + +typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR; + +typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( + VkDevice device, + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( + VkDevice device, + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( + VkDevice device, + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_image_format_list is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_image_format_list 1 +#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1 +#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list" +typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR; + + + +// VK_KHR_sampler_ycbcr_conversion is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_sampler_ycbcr_conversion 1 +typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR; + +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14 +#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion" +typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR; + +typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR; + +typedef VkChromaLocation VkChromaLocationKHR; + +typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR; + +typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR; + +typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR; + +typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR; + +typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR; + +typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); +typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR( + VkDevice device, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSamplerYcbcrConversion* pYcbcrConversion); + +VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR( + VkDevice device, + VkSamplerYcbcrConversion ycbcrConversion, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_KHR_bind_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_bind_memory2 1 +#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2" +typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR; + +typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR( + VkDevice device, + uint32_t bindInfoCount, + const VkBindImageMemoryInfo* pBindInfos); +#endif + + +// VK_KHR_maintenance3 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance3 1 +#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3" +// VK_KHR_MAINTENANCE3_SPEC_VERSION is a deprecated alias +#define VK_KHR_MAINTENANCE3_SPEC_VERSION VK_KHR_MAINTENANCE_3_SPEC_VERSION +// VK_KHR_MAINTENANCE3_EXTENSION_NAME is a deprecated alias +#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME +typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR; + +typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR( + VkDevice device, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); +#endif + + +// VK_KHR_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_draw_indirect_count 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 +#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_KHR_shader_subgroup_extended_types is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_extended_types 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types" +typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR; + + + +// VK_KHR_8bit_storage is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_8bit_storage 1 +#define VK_KHR_8BIT_STORAGE_SPEC_VERSION 1 +#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage" +typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR; + + + +// VK_KHR_shader_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_atomic_int64 1 +#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64" +typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR; + + + +// VK_KHR_shader_clock is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_clock 1 +#define VK_KHR_SHADER_CLOCK_SPEC_VERSION 1 +#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock" +typedef struct VkPhysicalDeviceShaderClockFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupClock; + VkBool32 shaderDeviceClock; +} VkPhysicalDeviceShaderClockFeaturesKHR; + + + +// VK_KHR_video_decode_h265 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_h265 1 +#include "vk_video/vulkan_video_codec_h265std_decode.h" +#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 8 +#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265" +typedef struct VkVideoDecodeH265ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoH265ProfileIdc stdProfileIdc; +} VkVideoDecodeH265ProfileInfoKHR; + +typedef struct VkVideoDecodeH265CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoH265LevelIdc maxLevelIdc; +} VkVideoDecodeH265CapabilitiesKHR; + +typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t stdVPSCount; + const StdVideoH265VideoParameterSet* pStdVPSs; + uint32_t stdSPSCount; + const StdVideoH265SequenceParameterSet* pStdSPSs; + uint32_t stdPPSCount; + const StdVideoH265PictureParameterSet* pStdPPSs; +} VkVideoDecodeH265SessionParametersAddInfoKHR; + +typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxStdVPSCount; + uint32_t maxStdSPSCount; + uint32_t maxStdPPSCount; + const VkVideoDecodeH265SessionParametersAddInfoKHR* pParametersAddInfo; +} VkVideoDecodeH265SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeH265PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265PictureInfo* pStdPictureInfo; + uint32_t sliceSegmentCount; + const uint32_t* pSliceSegmentOffsets; +} VkVideoDecodeH265PictureInfoKHR; + +typedef struct VkVideoDecodeH265DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeH265ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeH265DpbSlotInfoKHR; + + + +// VK_KHR_global_priority is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_global_priority 1 +#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR 16U +#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1 +#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority" + +typedef enum VkQueueGlobalPriorityKHR { + VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024, + VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR, + VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR, + VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR, + VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkQueueGlobalPriorityKHR; +typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueueGlobalPriorityKHR globalPriority; +} VkDeviceQueueGlobalPriorityCreateInfoKHR; + +typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 globalPriorityQuery; +} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR; + +typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t priorityCount; + VkQueueGlobalPriorityKHR priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR]; +} VkQueueFamilyGlobalPriorityPropertiesKHR; + + + +// VK_KHR_driver_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_driver_properties 1 +#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties" +#define VK_MAX_DRIVER_NAME_SIZE_KHR VK_MAX_DRIVER_NAME_SIZE +#define VK_MAX_DRIVER_INFO_SIZE_KHR VK_MAX_DRIVER_INFO_SIZE +typedef VkDriverId VkDriverIdKHR; + +typedef VkConformanceVersion VkConformanceVersionKHR; + +typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR; + + + +// VK_KHR_shader_float_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4 +#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls" +typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR; + +typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR; + + + +// VK_KHR_depth_stencil_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_depth_stencil_resolve 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1 +#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve" +typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR; + +typedef VkResolveModeFlags VkResolveModeFlagsKHR; + +typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR; + +typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR; + + + +// VK_KHR_swapchain_mutable_format is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_swapchain_mutable_format 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1 +#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format" + + +// VK_KHR_timeline_semaphore is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_timeline_semaphore 1 +#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2 +#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore" +typedef VkSemaphoreType VkSemaphoreTypeKHR; + +typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR; + +typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR; + +typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR; + +typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR; + +typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR; + +typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR; + +typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue); +typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); +typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR( + VkDevice device, + VkSemaphore semaphore, + uint64_t* pValue); + +VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); + +VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR( + VkDevice device, + const VkSemaphoreSignalInfo* pSignalInfo); +#endif + + +// VK_KHR_vulkan_memory_model is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_vulkan_memory_model 1 +#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3 +#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model" +typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR; + + + +// VK_KHR_shader_terminate_invocation is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_terminate_invocation 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1 +#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation" +typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR; + + + +// VK_KHR_fragment_shading_rate is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_fragment_shading_rate 1 +#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2 +#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate" + +typedef enum VkFragmentShadingRateCombinerOpKHR { + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4, + VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF +} VkFragmentShadingRateCombinerOpKHR; +typedef struct VkFragmentShadingRateAttachmentInfoKHR { + VkStructureType sType; + const void* pNext; + const VkAttachmentReference2* pFragmentShadingRateAttachment; + VkExtent2D shadingRateAttachmentTexelSize; +} VkFragmentShadingRateAttachmentInfoKHR; + +typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkExtent2D fragmentSize; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineFragmentShadingRate; + VkBool32 primitiveFragmentShadingRate; + VkBool32 attachmentFragmentShadingRate; +} VkPhysicalDeviceFragmentShadingRateFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentShadingRateAttachmentTexelSize; + VkExtent2D maxFragmentShadingRateAttachmentTexelSize; + uint32_t maxFragmentShadingRateAttachmentTexelSizeAspectRatio; + VkBool32 primitiveFragmentShadingRateWithMultipleViewports; + VkBool32 layeredShadingRateAttachments; + VkBool32 fragmentShadingRateNonTrivialCombinerOps; + VkExtent2D maxFragmentSize; + uint32_t maxFragmentSizeAspectRatio; + uint32_t maxFragmentShadingRateCoverageSamples; + VkSampleCountFlagBits maxFragmentShadingRateRasterizationSamples; + VkBool32 fragmentShadingRateWithShaderDepthStencilWrites; + VkBool32 fragmentShadingRateWithSampleMask; + VkBool32 fragmentShadingRateWithShaderSampleMask; + VkBool32 fragmentShadingRateWithConservativeRasterization; + VkBool32 fragmentShadingRateWithFragmentShaderInterlock; + VkBool32 fragmentShadingRateWithCustomSampleLocations; + VkBool32 fragmentShadingRateStrictMultiplyCombiner; +} VkPhysicalDeviceFragmentShadingRatePropertiesKHR; + +typedef struct VkPhysicalDeviceFragmentShadingRateKHR { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleCounts; + VkExtent2D fragmentSize; +} VkPhysicalDeviceFragmentShadingRateKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR( + VkCommandBuffer commandBuffer, + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_KHR_dynamic_rendering_local_read is a 
preprocessor guard. Do not pass it to API calls. +#define VK_KHR_dynamic_rendering_local_read 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_SPEC_VERSION 1 +#define VK_KHR_DYNAMIC_RENDERING_LOCAL_READ_EXTENSION_NAME "VK_KHR_dynamic_rendering_local_read" +typedef struct VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingLocalRead; +} VkPhysicalDeviceDynamicRenderingLocalReadFeaturesKHR; + +typedef struct VkRenderingAttachmentLocationInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentLocations; +} VkRenderingAttachmentLocationInfoKHR; + +typedef struct VkRenderingInputAttachmentIndexInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t colorAttachmentCount; + const uint32_t* pColorAttachmentInputIndices; + const uint32_t* pDepthInputAttachmentIndex; + const uint32_t* pStencilInputAttachmentIndex; +} VkRenderingInputAttachmentIndexInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingAttachmentLocationsKHR)(VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfoKHR* pLocationInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetRenderingInputAttachmentIndicesKHR)(VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingAttachmentLocationsKHR( + VkCommandBuffer commandBuffer, + const VkRenderingAttachmentLocationInfoKHR* pLocationInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRenderingInputAttachmentIndicesKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInputAttachmentIndexInfoKHR* pInputAttachmentIndexInfo); +#endif + + +// VK_KHR_shader_quad_control is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_quad_control 1 +#define VK_KHR_SHADER_QUAD_CONTROL_SPEC_VERSION 1 +#define VK_KHR_SHADER_QUAD_CONTROL_EXTENSION_NAME "VK_KHR_shader_quad_control" +typedef struct VkPhysicalDeviceShaderQuadControlFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderQuadControl; +} VkPhysicalDeviceShaderQuadControlFeaturesKHR; + + + +// VK_KHR_spirv_1_4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_spirv_1_4 1 +#define VK_KHR_SPIRV_1_4_SPEC_VERSION 1 +#define VK_KHR_SPIRV_1_4_EXTENSION_NAME "VK_KHR_spirv_1_4" + + +// VK_KHR_surface_protected_capabilities is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_surface_protected_capabilities 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1 +#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities" +typedef struct VkSurfaceProtectedCapabilitiesKHR { + VkStructureType sType; + const void* pNext; + VkBool32 supportsProtected; +} VkSurfaceProtectedCapabilitiesKHR; + + + +// VK_KHR_separate_depth_stencil_layouts is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_separate_depth_stencil_layouts 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1 +#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts" +typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR; + +typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR; + +typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR; + + + +// VK_KHR_present_wait is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_present_wait 1 +#define VK_KHR_PRESENT_WAIT_SPEC_VERSION 1 +#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait" +typedef struct VkPhysicalDevicePresentWaitFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentWait; +} VkPhysicalDevicePresentWaitFeaturesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR( + VkDevice device, + VkSwapchainKHR swapchain, + uint64_t presentId, + uint64_t timeout); +#endif + + +// VK_KHR_uniform_buffer_standard_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_uniform_buffer_standard_layout 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout" +typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR; + + + +// VK_KHR_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_buffer_device_address 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1 +#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address" +typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR; + +typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR; + +typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR; + +typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); +typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); + +VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR( + VkDevice device, + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); +#endif + + +// VK_KHR_deferred_host_operations is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_deferred_host_operations 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR) +#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4 +#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations" +typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation); +typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); +typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation); +typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR( + VkDevice device, + const VkAllocationCallbacks* pAllocator, + VkDeferredOperationKHR* pDeferredOperation); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR( + VkDevice device, + VkDeferredOperationKHR operation, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR( + VkDevice device, + VkDeferredOperationKHR operation); + +VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR( + VkDevice device, + VkDeferredOperationKHR operation); +#endif + + +// VK_KHR_pipeline_executable_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_executable_properties 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties" + +typedef enum VkPipelineExecutableStatisticFormatKHR { + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3, + VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPipelineExecutableStatisticFormatKHR; +typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineExecutableInfo; +} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR; + +typedef struct VkPipelineInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; +} VkPipelineInfoKHR; + +typedef struct VkPipelineExecutablePropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags stages; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t subgroupSize; +} VkPipelineExecutablePropertiesKHR; + +typedef struct VkPipelineExecutableInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipeline pipeline; + uint32_t executableIndex; +} VkPipelineExecutableInfoKHR; + +typedef union VkPipelineExecutableStatisticValueKHR { + VkBool32 b32; + int64_t i64; + uint64_t u64; + double f64; +} VkPipelineExecutableStatisticValueKHR; + +typedef struct VkPipelineExecutableStatisticKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkPipelineExecutableStatisticFormatKHR format; + 
VkPipelineExecutableStatisticValueKHR value; +} VkPipelineExecutableStatisticKHR; + +typedef struct VkPipelineExecutableInternalRepresentationKHR { + VkStructureType sType; + void* pNext; + char name[VK_MAX_DESCRIPTION_SIZE]; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkBool32 isText; + size_t dataSize; + void* pData; +} VkPipelineExecutableInternalRepresentationKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR( + VkDevice device, + const VkPipelineInfoKHR* pPipelineInfo, + uint32_t* pExecutableCount, + VkPipelineExecutablePropertiesKHR* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pStatisticCount, + VkPipelineExecutableStatisticKHR* pStatistics); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR( + VkDevice device, + const VkPipelineExecutableInfoKHR* pExecutableInfo, + uint32_t* pInternalRepresentationCount, + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); +#endif + + +// VK_KHR_map_memory2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_map_memory2 1 +#define VK_KHR_MAP_MEMORY_2_SPEC_VERSION 1 +#define VK_KHR_MAP_MEMORY_2_EXTENSION_NAME "VK_KHR_map_memory2" + +typedef enum VkMemoryUnmapFlagBitsKHR { + VK_MEMORY_UNMAP_RESERVE_BIT_EXT = 0x00000001, + VK_MEMORY_UNMAP_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkMemoryUnmapFlagBitsKHR; +typedef VkFlags VkMemoryUnmapFlagsKHR; +typedef struct VkMemoryMapInfoKHR { + VkStructureType sType; + const void* pNext; + VkMemoryMapFlags flags; + VkDeviceMemory memory; + VkDeviceSize offset; + VkDeviceSize size; +} VkMemoryMapInfoKHR; + +typedef struct VkMemoryUnmapInfoKHR { + VkStructureType sType; + const void* pNext; + VkMemoryUnmapFlagsKHR flags; + VkDeviceMemory memory; +} VkMemoryUnmapInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkMapMemory2KHR)(VkDevice device, const VkMemoryMapInfoKHR* pMemoryMapInfo, void** ppData); +typedef VkResult (VKAPI_PTR *PFN_vkUnmapMemory2KHR)(VkDevice device, const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory2KHR( + VkDevice device, + const VkMemoryMapInfoKHR* pMemoryMapInfo, + void** ppData); + +VKAPI_ATTR VkResult VKAPI_CALL vkUnmapMemory2KHR( + VkDevice device, + const VkMemoryUnmapInfoKHR* pMemoryUnmapInfo); +#endif + + +// VK_KHR_shader_integer_dot_product is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_shader_integer_dot_product 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1 +#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product" +typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR; + +typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR; + + + +// VK_KHR_pipeline_library is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_library 1 +#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library" +typedef struct VkPipelineLibraryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t libraryCount; + const VkPipeline* pLibraries; +} VkPipelineLibraryCreateInfoKHR; + + + +// VK_KHR_shader_non_semantic_info is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_non_semantic_info 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1 +#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info" + + +// VK_KHR_present_id is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_present_id 1 +#define VK_KHR_PRESENT_ID_SPEC_VERSION 1 +#define VK_KHR_PRESENT_ID_EXTENSION_NAME "VK_KHR_present_id" +typedef struct VkPresentIdKHR { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const uint64_t* pPresentIds; +} VkPresentIdKHR; + +typedef struct VkPhysicalDevicePresentIdFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 presentId; +} VkPhysicalDevicePresentIdFeaturesKHR; + + + +// VK_KHR_video_encode_queue is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_encode_queue 1 +#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 12 +#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue" + +typedef enum VkVideoEncodeTuningModeKHR { + VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1, + VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2, + VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3, + VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4, + VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeTuningModeKHR; +typedef VkFlags VkVideoEncodeFlagsKHR; + +typedef enum VkVideoEncodeCapabilityFlagBitsKHR { + VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CAPABILITY_INSUFFICIENT_BITSTREAM_BUFFER_RANGE_DETECTION_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeCapabilityFlagBitsKHR; +typedef VkFlags VkVideoEncodeCapabilityFlagsKHR; + +typedef enum VkVideoEncodeRateControlModeFlagBitsKHR { + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_DISABLED_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeRateControlModeFlagBitsKHR; +typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR; + +typedef enum VkVideoEncodeFeedbackFlagBitsKHR { + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BUFFER_OFFSET_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_BYTES_WRITTEN_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_FEEDBACK_BITSTREAM_HAS_OVERRIDES_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_FEEDBACK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkVideoEncodeFeedbackFlagBitsKHR; +typedef VkFlags VkVideoEncodeFeedbackFlagsKHR; + +typedef enum VkVideoEncodeUsageFlagBitsKHR { + VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008, + VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeUsageFlagBitsKHR; +typedef VkFlags VkVideoEncodeUsageFlagsKHR; + +typedef enum VkVideoEncodeContentFlagBitsKHR { + VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0, + VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001, + VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002, + VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004, + VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkVideoEncodeContentFlagBitsKHR; +typedef VkFlags VkVideoEncodeContentFlagsKHR; +typedef VkFlags VkVideoEncodeRateControlFlagsKHR; +typedef struct VkVideoEncodeInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFlagsKHR flags; + VkBuffer dstBuffer; + VkDeviceSize dstBufferOffset; + VkDeviceSize dstBufferRange; + VkVideoPictureResourceInfoKHR srcPictureResource; + const VkVideoReferenceSlotInfoKHR* pSetupReferenceSlot; + uint32_t referenceSlotCount; + const VkVideoReferenceSlotInfoKHR* pReferenceSlots; + uint32_t precedingExternallyEncodedBytes; +} VkVideoEncodeInfoKHR; + +typedef struct VkVideoEncodeCapabilitiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeCapabilityFlagsKHR flags; + VkVideoEncodeRateControlModeFlagsKHR rateControlModes; + uint32_t maxRateControlLayers; + uint64_t maxBitrate; + uint32_t maxQualityLevels; + VkExtent2D encodeInputPictureGranularity; + VkVideoEncodeFeedbackFlagsKHR supportedEncodeFeedbackFlags; +} VkVideoEncodeCapabilitiesKHR; + +typedef struct VkQueryPoolVideoEncodeFeedbackCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeFeedbackFlagsKHR encodeFeedbackFlags; +} VkQueryPoolVideoEncodeFeedbackCreateInfoKHR; + +typedef struct VkVideoEncodeUsageInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeUsageFlagsKHR videoUsageHints; + VkVideoEncodeContentFlagsKHR videoContentHints; + VkVideoEncodeTuningModeKHR tuningMode; +} VkVideoEncodeUsageInfoKHR; + +typedef struct VkVideoEncodeRateControlLayerInfoKHR { + VkStructureType sType; + const void* pNext; + uint64_t averageBitrate; + uint64_t maxBitrate; + uint32_t frameRateNumerator; + uint32_t frameRateDenominator; +} VkVideoEncodeRateControlLayerInfoKHR; + +typedef struct VkVideoEncodeRateControlInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoEncodeRateControlFlagsKHR flags; + VkVideoEncodeRateControlModeFlagBitsKHR rateControlMode; + uint32_t layerCount; + const VkVideoEncodeRateControlLayerInfoKHR* pLayers; + uint32_t virtualBufferSizeInMs; + uint32_t initialVirtualBufferSizeInMs; +} VkVideoEncodeRateControlInfoKHR; + +typedef struct VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; + const void* pNext; + const VkVideoProfileInfoKHR* pVideoProfile; + uint32_t qualityLevel; +} VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR; + +typedef struct VkVideoEncodeQualityLevelPropertiesKHR { + VkStructureType sType; + void* pNext; + VkVideoEncodeRateControlModeFlagBitsKHR preferredRateControlMode; + uint32_t preferredRateControlLayerCount; +} VkVideoEncodeQualityLevelPropertiesKHR; + +typedef struct VkVideoEncodeQualityLevelInfoKHR { + VkStructureType sType; 
+ const void* pNext; + uint32_t qualityLevel; +} VkVideoEncodeQualityLevelInfoKHR; + +typedef struct VkVideoEncodeSessionParametersGetInfoKHR { + VkStructureType sType; + const void* pNext; + VkVideoSessionParametersKHR videoSessionParameters; +} VkVideoEncodeSessionParametersGetInfoKHR; + +typedef struct VkVideoEncodeSessionParametersFeedbackInfoKHR { + VkStructureType sType; + void* pNext; + VkBool32 hasOverrides; +} VkVideoEncodeSessionParametersFeedbackInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetEncodedVideoSessionParametersKHR)(VkDevice device, const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR* pQualityLevelInfo, + VkVideoEncodeQualityLevelPropertiesKHR* pQualityLevelProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetEncodedVideoSessionParametersKHR( + VkDevice device, + const VkVideoEncodeSessionParametersGetInfoKHR* pVideoSessionParametersInfo, + VkVideoEncodeSessionParametersFeedbackInfoKHR* pFeedbackInfo, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR( + VkCommandBuffer commandBuffer, + const VkVideoEncodeInfoKHR* pEncodeInfo); +#endif + + +// VK_KHR_synchronization2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_synchronization2 1 +#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1 +#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2" +typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR; + +typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR; + +typedef VkAccessFlags2 VkAccessFlags2KHR; + +typedef VkAccessFlagBits2 VkAccessFlagBits2KHR; + +typedef VkSubmitFlagBits VkSubmitFlagBitsKHR; + +typedef VkSubmitFlags VkSubmitFlagsKHR; + +typedef VkMemoryBarrier2 VkMemoryBarrier2KHR; + +typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR; + +typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR; + +typedef VkDependencyInfo VkDependencyInfoKHR; + +typedef VkSubmitInfo2 VkSubmitInfo2KHR; + +typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR; + +typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR; + +typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR; + +typedef struct VkQueueFamilyCheckpointProperties2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 checkpointExecutionStageMask; +} VkQueueFamilyCheckpointProperties2NV; + +typedef struct VkCheckpointData2NV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags2 stage; + void* pCheckpointMarker; +} VkCheckpointData2NV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); +typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); +typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR( + VkCommandBuffer commandBuffer, + const VkDependencyInfo* pDependencyInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkBuffer 
dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointData2NV* pCheckpointData); +#endif + + +// VK_KHR_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_fragment_shader_barycentric 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric" +typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderBarycentric; +} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR; + +typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 triStripVertexOrderIndependentOfProvokingVertex; +} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR; + + + +// VK_KHR_shader_subgroup_uniform_control_flow is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_subgroup_uniform_control_flow 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1 +#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow" +typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupUniformControlFlow; +} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR; + + + +// VK_KHR_zero_initialize_workgroup_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_zero_initialize_workgroup_memory 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1 +#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory" +typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR; + + + +// VK_KHR_workgroup_memory_explicit_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_workgroup_memory_explicit_layout 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1 +#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout" +typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 workgroupMemoryExplicitLayout; + VkBool32 workgroupMemoryExplicitLayoutScalarBlockLayout; + VkBool32 workgroupMemoryExplicitLayout8BitAccess; + VkBool32 workgroupMemoryExplicitLayout16BitAccess; +} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR; + + + +// VK_KHR_copy_commands2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_copy_commands2 1 +#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1 +#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2" +typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR; + +typedef VkCopyImageInfo2 VkCopyImageInfo2KHR; + +typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR; + +typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR; + +typedef VkBlitImageInfo2 VkBlitImageInfo2KHR; + +typedef VkResolveImageInfo2 VkResolveImageInfo2KHR; + +typedef VkBufferCopy2 VkBufferCopy2KHR; + +typedef VkImageCopy2 VkImageCopy2KHR; + +typedef VkImageBlit2 VkImageBlit2KHR; + +typedef VkBufferImageCopy2 VkBufferImageCopy2KHR; + +typedef VkImageResolve2 VkImageResolve2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); +typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR( + VkCommandBuffer commandBuffer, + const VkResolveImageInfo2* pResolveImageInfo); +#endif + + +// VK_KHR_format_feature_flags2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_format_feature_flags2 1 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2 +#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2" +typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR; + +typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR; + +typedef VkFormatProperties3 VkFormatProperties3KHR; + + + +// VK_KHR_ray_tracing_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_ray_tracing_maintenance1 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1" +typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMaintenance1; + VkBool32 rayTracingPipelineTraceRaysIndirect2; +} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR; + +typedef struct VkTraceRaysIndirectCommand2KHR { + VkDeviceAddress raygenShaderRecordAddress; + VkDeviceSize raygenShaderRecordSize; + VkDeviceAddress missShaderBindingTableAddress; + VkDeviceSize missShaderBindingTableSize; + VkDeviceSize missShaderBindingTableStride; + VkDeviceAddress hitShaderBindingTableAddress; + VkDeviceSize hitShaderBindingTableSize; + VkDeviceSize hitShaderBindingTableStride; + VkDeviceAddress callableShaderBindingTableAddress; + VkDeviceSize callableShaderBindingTableSize; + VkDeviceSize callableShaderBindingTableStride; + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommand2KHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectDeviceAddress); +#endif + + +// VK_KHR_portability_enumeration is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_portability_enumeration 1 +#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 +#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" + + +// VK_KHR_maintenance4 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance4 1 +#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2 +#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4" +typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR; + +typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR; + +typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR; + +typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR; + +typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR( + VkDevice device, + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR( + VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); +#endif + + +// VK_KHR_shader_subgroup_rotate is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_shader_subgroup_rotate 1 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_SPEC_VERSION 2 +#define VK_KHR_SHADER_SUBGROUP_ROTATE_EXTENSION_NAME "VK_KHR_shader_subgroup_rotate" +typedef struct VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderSubgroupRotate; + VkBool32 shaderSubgroupRotateClustered; +} VkPhysicalDeviceShaderSubgroupRotateFeaturesKHR; + + + +// VK_KHR_shader_maximal_reconvergence is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_maximal_reconvergence 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_SPEC_VERSION 1 +#define VK_KHR_SHADER_MAXIMAL_RECONVERGENCE_EXTENSION_NAME "VK_KHR_shader_maximal_reconvergence" +typedef struct VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderMaximalReconvergence; +} VkPhysicalDeviceShaderMaximalReconvergenceFeaturesKHR; + + + +// VK_KHR_maintenance5 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance5 1 +#define VK_KHR_MAINTENANCE_5_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_5_EXTENSION_NAME "VK_KHR_maintenance5" +typedef VkFlags64 VkPipelineCreateFlags2KHR; + +// Flag bits for VkPipelineCreateFlagBits2KHR +typedef VkFlags64 VkPipelineCreateFlagBits2KHR; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISABLE_OPTIMIZATION_BIT_KHR = 0x00000001ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ALLOW_DERIVATIVES_BIT_KHR = 0x00000002ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DERIVATIVE_BIT_KHR = 0x00000004ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x400000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = 0x00000008ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DISPATCH_BASE_BIT_KHR = 0x00000010ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEFER_COMPILE_BIT_NV = 0x00000020ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_STATISTICS_BIT_KHR = 0x00000040ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_KHR = 0x00000100ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_EARLY_RETURN_ON_FAILURE_BIT_KHR = 0x00000200ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_LIBRARY_BIT_KHR = 0x00000800ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000ULL; +static const VkPipelineCreateFlagBits2KHR 
VK_PIPELINE_CREATE_2_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_NV = 0x00040000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_RAY_TRACING_DISPLACEMENT_MICROMAP_BIT_NV = 0x10000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_CAPTURE_DATA_BIT_KHR = 0x80000000ULL; +static const VkPipelineCreateFlagBits2KHR VK_PIPELINE_CREATE_2_INDIRECT_BINDABLE_BIT_EXT = 0x4000000000ULL; + +typedef VkFlags64 VkBufferUsageFlags2KHR; + +// Flag bits for VkBufferUsageFlagBits2KHR +typedef VkFlags64 VkBufferUsageFlagBits2KHR; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_SRC_BIT_KHR = 0x00000001ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFER_DST_BIT_KHR = 0x00000002ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000004ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_UNIFORM_BUFFER_BIT_KHR = 0x00000010ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_STORAGE_BUFFER_BIT_KHR = 0x00000020ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDEX_BUFFER_BIT_KHR = 0x00000040ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VERTEX_BUFFER_BIT_KHR = 0x00000080ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_INDIRECT_BUFFER_BIT_KHR = 0x00000100ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_EXECUTION_GRAPH_SCRATCH_BIT_AMDX = 0x02000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RAY_TRACING_BIT_NV = 0x00000400ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000ULL; +static const VkBufferUsageFlagBits2KHR 
VK_BUFFER_USAGE_2_VIDEO_DECODE_DST_BIT_KHR = 0x00004000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SHADER_DEVICE_ADDRESS_BIT_KHR = 0x00020000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_MICROMAP_STORAGE_BIT_EXT = 0x01000000ULL; +static const VkBufferUsageFlagBits2KHR VK_BUFFER_USAGE_2_PREPROCESS_BUFFER_BIT_EXT = 0x80000000ULL; + +typedef struct VkPhysicalDeviceMaintenance5FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance5; +} VkPhysicalDeviceMaintenance5FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance5PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 earlyFragmentMultisampleCoverageAfterSampleCounting; + VkBool32 earlyFragmentSampleMaskTestBeforeSampleCounting; + VkBool32 depthStencilSwizzleOneSupport; + VkBool32 polygonModePointSize; + VkBool32 nonStrictSinglePixelWideLinesUseParallelogram; + VkBool32 nonStrictWideLinesUseParallelogram; +} VkPhysicalDeviceMaintenance5PropertiesKHR; + +typedef struct VkRenderingAreaInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t viewMask; + uint32_t colorAttachmentCount; + const VkFormat* pColorAttachmentFormats; + VkFormat depthAttachmentFormat; + VkFormat stencilAttachmentFormat; +} VkRenderingAreaInfoKHR; + +typedef struct VkImageSubresource2KHR { + VkStructureType sType; + void* pNext; + VkImageSubresource imageSubresource; +} VkImageSubresource2KHR; + +typedef struct VkDeviceImageSubresourceInfoKHR { + VkStructureType sType; + const void* pNext; + const VkImageCreateInfo* pCreateInfo; + const VkImageSubresource2KHR* pSubresource; +} VkDeviceImageSubresourceInfoKHR; + +typedef struct VkSubresourceLayout2KHR { + VkStructureType sType; + void* pNext; + VkSubresourceLayout subresourceLayout; +} VkSubresourceLayout2KHR; + +typedef struct VkPipelineCreateFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags2KHR flags; +} VkPipelineCreateFlags2CreateInfoKHR; + +typedef struct VkBufferUsageFlags2CreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkBufferUsageFlags2KHR usage; +} VkBufferUsageFlags2CreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer2KHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); +typedef void (VKAPI_PTR *PFN_vkGetRenderingAreaGranularityKHR)(VkDevice device, const VkRenderingAreaInfoKHR* pRenderingAreaInfo, VkExtent2D* pGranularity); +typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSubresourceLayoutKHR)(VkDevice device, const VkDeviceImageSubresourceInfoKHR* pInfo, VkSubresourceLayout2KHR* pLayout); +typedef void (VKAPI_PTR 
*PFN_vkGetImageSubresourceLayout2KHR)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer2KHR( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); + +VKAPI_ATTR void VKAPI_CALL vkGetRenderingAreaGranularityKHR( + VkDevice device, + const VkRenderingAreaInfoKHR* pRenderingAreaInfo, + VkExtent2D* pGranularity); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSubresourceLayoutKHR( + VkDevice device, + const VkDeviceImageSubresourceInfoKHR* pInfo, + VkSubresourceLayout2KHR* pLayout); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2KHR( + VkDevice device, + VkImage image, + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); +#endif + + +// VK_KHR_ray_tracing_position_fetch is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_tracing_position_fetch 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_POSITION_FETCH_EXTENSION_NAME "VK_KHR_ray_tracing_position_fetch" +typedef struct VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPositionFetch; +} VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR; + + + +// VK_KHR_pipeline_binary is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_pipeline_binary 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineBinaryKHR) +#define VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR 32U +#define VK_KHR_PIPELINE_BINARY_SPEC_VERSION 1 +#define VK_KHR_PIPELINE_BINARY_EXTENSION_NAME "VK_KHR_pipeline_binary" +typedef struct VkPhysicalDevicePipelineBinaryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaries; +} VkPhysicalDevicePipelineBinaryFeaturesKHR; + +typedef struct VkPhysicalDevicePipelineBinaryPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 pipelineBinaryInternalCache; + VkBool32 pipelineBinaryInternalCacheControl; + VkBool32 pipelineBinaryPrefersInternalCache; + VkBool32 pipelineBinaryPrecompiledInternalCache; + VkBool32 pipelineBinaryCompressedData; +} VkPhysicalDevicePipelineBinaryPropertiesKHR; + +typedef struct VkDevicePipelineBinaryInternalCacheControlKHR { + VkStructureType sType; + const void* pNext; + VkBool32 disableInternalCache; +} VkDevicePipelineBinaryInternalCacheControlKHR; + +typedef struct VkPipelineBinaryKeyKHR { + VkStructureType sType; + void* pNext; + uint32_t keySize; + uint8_t key[VK_MAX_PIPELINE_BINARY_KEY_SIZE_KHR]; +} VkPipelineBinaryKeyKHR; + +typedef struct VkPipelineBinaryDataKHR { + size_t dataSize; + void* pData; +} VkPipelineBinaryDataKHR; + +typedef struct VkPipelineBinaryKeysAndDataKHR { + uint32_t binaryCount; + const VkPipelineBinaryKeyKHR* pPipelineBinaryKeys; + const VkPipelineBinaryDataKHR* pPipelineBinaryData; +} VkPipelineBinaryKeysAndDataKHR; + +typedef struct VkPipelineCreateInfoKHR { + VkStructureType sType; + void* pNext; +} VkPipelineCreateInfoKHR; + +typedef struct VkPipelineBinaryCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const VkPipelineBinaryKeysAndDataKHR* pKeysAndDataInfo; + VkPipeline pipeline; + const VkPipelineCreateInfoKHR* pPipelineCreateInfo; +} VkPipelineBinaryCreateInfoKHR; + +typedef struct VkPipelineBinaryInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t binaryCount; + const VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryInfoKHR; + +typedef struct 
VkReleaseCapturedPipelineDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkReleaseCapturedPipelineDataInfoKHR; + +typedef struct VkPipelineBinaryDataInfoKHR { + VkStructureType sType; + void* pNext; + VkPipelineBinaryKHR pipelineBinary; +} VkPipelineBinaryDataInfoKHR; + +typedef struct VkPipelineBinaryHandlesInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t pipelineBinaryCount; + VkPipelineBinaryKHR* pPipelineBinaries; +} VkPipelineBinaryHandlesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineBinariesKHR)(VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries); +typedef void (VKAPI_PTR *PFN_vkDestroyPipelineBinaryKHR)(VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineKeyKHR)(VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey); +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineBinaryDataKHR)(VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseCapturedPipelineDataKHR)(VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineBinariesKHR( + VkDevice device, + const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPipelineBinaryHandlesInfoKHR* pBinaries); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineBinaryKHR( + VkDevice device, + VkPipelineBinaryKHR pipelineBinary, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineKeyKHR( + VkDevice device, + const VkPipelineCreateInfoKHR* pPipelineCreateInfo, + VkPipelineBinaryKeyKHR* pPipelineKey); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineBinaryDataKHR( + VkDevice device, + const VkPipelineBinaryDataInfoKHR* pInfo, + VkPipelineBinaryKeyKHR* pPipelineBinaryKey, + size_t* pPipelineBinaryDataSize, + void* pPipelineBinaryData); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseCapturedPipelineDataKHR( + VkDevice device, + const VkReleaseCapturedPipelineDataInfoKHR* pInfo, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_KHR_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_cooperative_matrix 1 +#define VK_KHR_COOPERATIVE_MATRIX_SPEC_VERSION 2 +#define VK_KHR_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_KHR_cooperative_matrix" + +typedef enum VkComponentTypeKHR { + VK_COMPONENT_TYPE_FLOAT16_KHR = 0, + VK_COMPONENT_TYPE_FLOAT32_KHR = 1, + VK_COMPONENT_TYPE_FLOAT64_KHR = 2, + VK_COMPONENT_TYPE_SINT8_KHR = 3, + VK_COMPONENT_TYPE_SINT16_KHR = 4, + VK_COMPONENT_TYPE_SINT32_KHR = 5, + VK_COMPONENT_TYPE_SINT64_KHR = 6, + VK_COMPONENT_TYPE_UINT8_KHR = 7, + VK_COMPONENT_TYPE_UINT16_KHR = 8, + VK_COMPONENT_TYPE_UINT32_KHR = 9, + VK_COMPONENT_TYPE_UINT64_KHR = 10, + VK_COMPONENT_TYPE_FLOAT16_NV = VK_COMPONENT_TYPE_FLOAT16_KHR, + VK_COMPONENT_TYPE_FLOAT32_NV = VK_COMPONENT_TYPE_FLOAT32_KHR, + VK_COMPONENT_TYPE_FLOAT64_NV = VK_COMPONENT_TYPE_FLOAT64_KHR, + VK_COMPONENT_TYPE_SINT8_NV = VK_COMPONENT_TYPE_SINT8_KHR, + VK_COMPONENT_TYPE_SINT16_NV = VK_COMPONENT_TYPE_SINT16_KHR, + VK_COMPONENT_TYPE_SINT32_NV = VK_COMPONENT_TYPE_SINT32_KHR, + VK_COMPONENT_TYPE_SINT64_NV = VK_COMPONENT_TYPE_SINT64_KHR, + VK_COMPONENT_TYPE_UINT8_NV = VK_COMPONENT_TYPE_UINT8_KHR, + VK_COMPONENT_TYPE_UINT16_NV = VK_COMPONENT_TYPE_UINT16_KHR, + VK_COMPONENT_TYPE_UINT32_NV = VK_COMPONENT_TYPE_UINT32_KHR, + VK_COMPONENT_TYPE_UINT64_NV = VK_COMPONENT_TYPE_UINT64_KHR, + VK_COMPONENT_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkComponentTypeKHR; + +typedef enum VkScopeKHR { + VK_SCOPE_DEVICE_KHR = 1, + VK_SCOPE_WORKGROUP_KHR = 2, + VK_SCOPE_SUBGROUP_KHR = 3, + VK_SCOPE_QUEUE_FAMILY_KHR = 5, + VK_SCOPE_DEVICE_NV = VK_SCOPE_DEVICE_KHR, + VK_SCOPE_WORKGROUP_NV = VK_SCOPE_WORKGROUP_KHR, + VK_SCOPE_SUBGROUP_NV = VK_SCOPE_SUBGROUP_KHR, + VK_SCOPE_QUEUE_FAMILY_NV = VK_SCOPE_QUEUE_FAMILY_KHR, + VK_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkScopeKHR; +typedef struct VkCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeKHR AType; + VkComponentTypeKHR BType; + VkComponentTypeKHR CType; + VkComponentTypeKHR ResultType; + VkBool32 saturatingAccumulation; + VkScopeKHR scope; +} VkCooperativeMatrixPropertiesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesKHR; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesKHR { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesKHR* pProperties); +#endif + + +// VK_KHR_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_compute_shader_derivatives 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_KHR_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_KHR_compute_shader_derivatives" +typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 computeDerivativeGroupQuads; + VkBool32 computeDerivativeGroupLinear; +} VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR; + +typedef struct VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 meshAndTaskShaderDerivatives; +} VkPhysicalDeviceComputeShaderDerivativesPropertiesKHR; + + + +// VK_KHR_video_decode_av1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_decode_av1 1 +#include "vk_video/vulkan_video_codec_av1std.h" +#include "vk_video/vulkan_video_codec_av1std_decode.h" +#define VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR 7U +#define VK_KHR_VIDEO_DECODE_AV1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_DECODE_AV1_EXTENSION_NAME "VK_KHR_video_decode_av1" +typedef struct VkVideoDecodeAV1ProfileInfoKHR { + VkStructureType sType; + const void* pNext; + StdVideoAV1Profile stdProfile; + VkBool32 filmGrainSupport; +} VkVideoDecodeAV1ProfileInfoKHR; + +typedef struct VkVideoDecodeAV1CapabilitiesKHR { + VkStructureType sType; + void* pNext; + StdVideoAV1Level maxLevel; +} VkVideoDecodeAV1CapabilitiesKHR; + +typedef struct VkVideoDecodeAV1SessionParametersCreateInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoAV1SequenceHeader* pStdSequenceHeader; +} VkVideoDecodeAV1SessionParametersCreateInfoKHR; + +typedef struct VkVideoDecodeAV1PictureInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1PictureInfo* pStdPictureInfo; + int32_t referenceNameSlotIndices[VK_MAX_VIDEO_AV1_REFERENCES_PER_FRAME_KHR]; + uint32_t frameHeaderOffset; + uint32_t tileCount; + const uint32_t* pTileOffsets; + const uint32_t* pTileSizes; +} VkVideoDecodeAV1PictureInfoKHR; + +typedef struct VkVideoDecodeAV1DpbSlotInfoKHR { + VkStructureType sType; + const void* pNext; + const StdVideoDecodeAV1ReferenceInfo* pStdReferenceInfo; +} VkVideoDecodeAV1DpbSlotInfoKHR; + + + +// VK_KHR_video_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_video_maintenance1 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_KHR_VIDEO_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_video_maintenance1" +typedef struct VkPhysicalDeviceVideoMaintenance1FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 videoMaintenance1; +} VkPhysicalDeviceVideoMaintenance1FeaturesKHR; + +typedef struct VkVideoInlineQueryInfoKHR { + VkStructureType sType; + const void* pNext; + VkQueryPool queryPool; + uint32_t firstQuery; + uint32_t queryCount; +} VkVideoInlineQueryInfoKHR; + + + +// VK_KHR_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_vertex_attribute_divisor 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 1 +#define VK_KHR_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_KHR_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; + VkBool32 supportsNonZeroFirstInstance; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesKHR; + +typedef struct VkVertexInputBindingDivisorDescriptionKHR { + uint32_t binding; + uint32_t divisor; +} VkVertexInputBindingDivisorDescriptionKHR; + +typedef struct VkPipelineVertexInputDivisorStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t vertexBindingDivisorCount; + const VkVertexInputBindingDivisorDescriptionKHR* pVertexBindingDivisors; +} VkPipelineVertexInputDivisorStateCreateInfoKHR; + +typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 vertexAttributeInstanceRateDivisor; + VkBool32 vertexAttributeInstanceRateZeroDivisor; +} VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR; + + + +// VK_KHR_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_load_store_op_none 1 +#define VK_KHR_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_KHR_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_KHR_load_store_op_none" + + +// VK_KHR_shader_float_controls2 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_float_controls2 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_SPEC_VERSION 1 +#define VK_KHR_SHADER_FLOAT_CONTROLS_2_EXTENSION_NAME "VK_KHR_shader_float_controls2" +typedef struct VkPhysicalDeviceShaderFloatControls2FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloatControls2; +} VkPhysicalDeviceShaderFloatControls2FeaturesKHR; + + + +// VK_KHR_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_index_type_uint8 1 +#define VK_KHR_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_KHR_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_KHR_index_type_uint8" +typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 indexTypeUint8; +} VkPhysicalDeviceIndexTypeUint8FeaturesKHR; + + + +// VK_KHR_line_rasterization is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_line_rasterization 1 +#define VK_KHR_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_KHR_LINE_RASTERIZATION_EXTENSION_NAME "VK_KHR_line_rasterization" + +typedef enum VkLineRasterizationModeKHR { + VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR = 0, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR = 1, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR = 2, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR = 3, + VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = VK_LINE_RASTERIZATION_MODE_DEFAULT_KHR, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_KHR, + VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = VK_LINE_RASTERIZATION_MODE_BRESENHAM_KHR, + VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_KHR, + VK_LINE_RASTERIZATION_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkLineRasterizationModeKHR; +typedef struct VkPhysicalDeviceLineRasterizationFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rectangularLines; + VkBool32 bresenhamLines; + VkBool32 smoothLines; + VkBool32 stippledRectangularLines; + VkBool32 stippledBresenhamLines; + VkBool32 stippledSmoothLines; +} VkPhysicalDeviceLineRasterizationFeaturesKHR; + +typedef struct VkPhysicalDeviceLineRasterizationPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t lineSubPixelPrecisionBits; +} VkPhysicalDeviceLineRasterizationPropertiesKHR; + +typedef struct VkPipelineRasterizationLineStateCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkLineRasterizationModeKHR lineRasterizationMode; + VkBool32 stippledLineEnable; + uint32_t lineStippleFactor; + uint16_t lineStipplePattern; +} VkPipelineRasterizationLineStateCreateInfoKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleKHR)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleKHR( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_KHR_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_calibrated_timestamps 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_SPEC_VERSION 1 +#define VK_KHR_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_KHR_calibrated_timestamps" + +typedef enum VkTimeDomainKHR { + VK_TIME_DOMAIN_DEVICE_KHR = 0, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR = 1, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR = 2, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR = 3, + VK_TIME_DOMAIN_DEVICE_EXT = VK_TIME_DOMAIN_DEVICE_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_KHR, + VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_KHR, + VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_KHR, + VK_TIME_DOMAIN_MAX_ENUM_KHR = 0x7FFFFFFF +} VkTimeDomainKHR; +typedef struct VkCalibratedTimestampInfoKHR { + VkStructureType sType; + const void* pNext; + VkTimeDomainKHR timeDomain; +} VkCalibratedTimestampInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsKHR)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsKHR( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_KHR_shader_expect_assume is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_expect_assume 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_SPEC_VERSION 1 +#define VK_KHR_SHADER_EXPECT_ASSUME_EXTENSION_NAME "VK_KHR_shader_expect_assume" +typedef struct VkPhysicalDeviceShaderExpectAssumeFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderExpectAssume; +} VkPhysicalDeviceShaderExpectAssumeFeaturesKHR; + + + +// VK_KHR_maintenance6 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_maintenance6 1 +#define VK_KHR_MAINTENANCE_6_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_6_EXTENSION_NAME "VK_KHR_maintenance6" +typedef struct VkPhysicalDeviceMaintenance6FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance6; +} VkPhysicalDeviceMaintenance6FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance6PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 blockTexelViewCompatibleMultipleLayers; + uint32_t maxCombinedImageSamplerDescriptorCount; + VkBool32 fragmentShadingRateClampCombinerInputs; +} VkPhysicalDeviceMaintenance6PropertiesKHR; + +typedef struct VkBindMemoryStatusKHR { + VkStructureType sType; + const void* pNext; + VkResult* pResult; +} VkBindMemoryStatusKHR; + +typedef struct VkBindDescriptorSetsInfoKHR { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t descriptorSetCount; + const VkDescriptorSet* pDescriptorSets; + uint32_t dynamicOffsetCount; + const uint32_t* pDynamicOffsets; +} VkBindDescriptorSetsInfoKHR; + +typedef struct VkPushConstantsInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineLayout layout; + VkShaderStageFlags stageFlags; + uint32_t offset; + uint32_t size; + const void* pValues; +} VkPushConstantsInfoKHR; + +typedef struct VkPushDescriptorSetInfoKHR { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; + uint32_t descriptorWriteCount; + const VkWriteDescriptorSet* pDescriptorWrites; +} VkPushDescriptorSetInfoKHR; + +typedef struct VkPushDescriptorSetWithTemplateInfoKHR { + VkStructureType sType; + const void* pNext; + VkDescriptorUpdateTemplate descriptorUpdateTemplate; + VkPipelineLayout layout; + uint32_t set; + const void* pData; +} VkPushDescriptorSetWithTemplateInfoKHR; + +typedef struct VkSetDescriptorBufferOffsetsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t firstSet; + uint32_t setCount; + const uint32_t* pBufferIndices; + const VkDeviceSize* pOffsets; +} VkSetDescriptorBufferOffsetsInfoEXT; + +typedef struct VkBindDescriptorBufferEmbeddedSamplersInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags stageFlags; + VkPipelineLayout layout; + uint32_t set; +} VkBindDescriptorBufferEmbeddedSamplersInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets2KHR)(VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushConstants2KHR)(VkCommandBuffer commandBuffer, const VkPushConstantsInfoKHR* pPushConstantsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSet2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); +typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplate2KHR)(VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsets2EXT)(VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT)(VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets2KHR( + VkCommandBuffer commandBuffer, + const 
VkBindDescriptorSetsInfoKHR* pBindDescriptorSetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants2KHR( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfoKHR* pPushConstantsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSet2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfoKHR* pPushDescriptorSetInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfoKHR* pPushDescriptorSetWithTemplateInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); +#endif + + +// VK_KHR_shader_relaxed_extended_instruction is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_shader_relaxed_extended_instruction 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_SPEC_VERSION 1 +#define VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME "VK_KHR_shader_relaxed_extended_instruction" +typedef struct VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 shaderRelaxedExtendedInstruction; +} VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR; + + + +// VK_KHR_maintenance7 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_maintenance7 1 +#define VK_KHR_MAINTENANCE_7_SPEC_VERSION 1 +#define VK_KHR_MAINTENANCE_7_EXTENSION_NAME "VK_KHR_maintenance7" + +typedef enum VkPhysicalDeviceLayeredApiKHR { + VK_PHYSICAL_DEVICE_LAYERED_API_VULKAN_KHR = 0, + VK_PHYSICAL_DEVICE_LAYERED_API_D3D12_KHR = 1, + VK_PHYSICAL_DEVICE_LAYERED_API_METAL_KHR = 2, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGL_KHR = 3, + VK_PHYSICAL_DEVICE_LAYERED_API_OPENGLES_KHR = 4, + VK_PHYSICAL_DEVICE_LAYERED_API_MAX_ENUM_KHR = 0x7FFFFFFF +} VkPhysicalDeviceLayeredApiKHR; +typedef struct VkPhysicalDeviceMaintenance7FeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 maintenance7; +} VkPhysicalDeviceMaintenance7FeaturesKHR; + +typedef struct VkPhysicalDeviceMaintenance7PropertiesKHR { + VkStructureType sType; + void* pNext; + VkBool32 robustFragmentShadingRateAttachmentAccess; + VkBool32 separateDepthStencilAttachmentAccess; + uint32_t maxDescriptorSetTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetTotalBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalUniformBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalStorageBuffersDynamic; + uint32_t maxDescriptorSetUpdateAfterBindTotalBuffersDynamic; +} VkPhysicalDeviceMaintenance7PropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t vendorID; + uint32_t deviceID; + VkPhysicalDeviceLayeredApiKHR layeredAPI; + char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; +} VkPhysicalDeviceLayeredApiPropertiesKHR; + +typedef struct VkPhysicalDeviceLayeredApiPropertiesListKHR { + VkStructureType sType; + void* pNext; + uint32_t layeredApiCount; + VkPhysicalDeviceLayeredApiPropertiesKHR* pLayeredApis; +} VkPhysicalDeviceLayeredApiPropertiesListKHR; + +typedef struct VkPhysicalDeviceLayeredApiVulkanPropertiesKHR { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceProperties2 
properties; +} VkPhysicalDeviceLayeredApiVulkanPropertiesKHR; + + + +// VK_EXT_debug_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_report 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) +#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 10 +#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" + +typedef enum VkDebugReportObjectTypeEXT { + VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, + VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, + VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, + VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, + VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, + VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, + VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, + VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, + VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, + VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, + VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, + VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, + VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, + VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, + VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, + VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, + VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, + VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000, + VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000, + VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_MODULE_NV_EXT = 1000307000, + VK_DEBUG_REPORT_OBJECT_TYPE_CUDA_FUNCTION_NV_EXT = 1000307001, + VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000, + // VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT, + // VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT is a deprecated alias + VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT, + VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportObjectTypeEXT; + +typedef enum VkDebugReportFlagBitsEXT { + VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, + VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, + 
VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, + VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, + VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, + VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugReportFlagBitsEXT; +typedef VkFlags VkDebugReportFlagsEXT; +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage, + void* pUserData); + +typedef struct VkDebugReportCallbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportFlagsEXT flags; + PFN_vkDebugReportCallbackEXT pfnCallback; + void* pUserData; +} VkDebugReportCallbackCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( + VkInstance instance, + const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugReportCallbackEXT* pCallback); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( + VkInstance instance, + VkDebugReportCallbackEXT callback, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( + VkInstance instance, + VkDebugReportFlagsEXT flags, + VkDebugReportObjectTypeEXT objectType, + uint64_t object, + size_t location, + int32_t messageCode, + const char* pLayerPrefix, + const char* pMessage); +#endif + + +// VK_NV_glsl_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_glsl_shader 1 +#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 +#define VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" + + +// VK_EXT_depth_range_unrestricted is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_range_unrestricted 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 +#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" + + +// VK_IMG_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_filter_cubic 1 +#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 +#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" + + +// VK_AMD_rasterization_order is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_rasterization_order 1 +#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 +#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" + +typedef enum VkRasterizationOrderAMD { + VK_RASTERIZATION_ORDER_STRICT_AMD = 0, + VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, + VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF +} VkRasterizationOrderAMD; +typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { + VkStructureType sType; + const void* pNext; + VkRasterizationOrderAMD rasterizationOrder; +} VkPipelineRasterizationStateRasterizationOrderAMD; + + + +// VK_AMD_shader_trinary_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_trinary_minmax 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 +#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" + + +// VK_AMD_shader_explicit_vertex_parameter is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_explicit_vertex_parameter 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 +#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" + + +// VK_EXT_debug_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_debug_marker 1 +#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 +#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" +typedef struct VkDebugMarkerObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + const char* pObjectName; +} VkDebugMarkerObjectNameInfoEXT; + +typedef struct VkDebugMarkerObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugReportObjectTypeEXT objectType; + uint64_t object; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugMarkerObjectTagInfoEXT; + +typedef struct VkDebugMarkerMarkerInfoEXT { + VkStructureType sType; + const void* pNext; + const char* pMarkerName; + float color[4]; +} VkDebugMarkerMarkerInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo); +typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( + VkDevice device, + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( + VkDevice device, + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +#endif + + +// VK_AMD_gcn_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_gcn_shader 1 +#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 +#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" + + +// VK_NV_dedicated_allocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_dedicated_allocation 1 +#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" +typedef struct VkDedicatedAllocationImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationImageCreateInfoNV; + +typedef struct VkDedicatedAllocationBufferCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 dedicatedAllocation; +} VkDedicatedAllocationBufferCreateInfoNV; + +typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkImage image; + VkBuffer buffer; +} VkDedicatedAllocationMemoryAllocateInfoNV; + + + +// VK_EXT_transform_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_transform_feedback 1 +#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback" +typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT; +typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 transformFeedback; + VkBool32 geometryStreams; +} VkPhysicalDeviceTransformFeedbackFeaturesEXT; + +typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTransformFeedbackStreams; + uint32_t maxTransformFeedbackBuffers; + VkDeviceSize maxTransformFeedbackBufferSize; + uint32_t maxTransformFeedbackStreamDataSize; + uint32_t maxTransformFeedbackBufferDataSize; + uint32_t maxTransformFeedbackBufferDataStride; + VkBool32 transformFeedbackQueries; + VkBool32 transformFeedbackStreamsLinesTriangles; + VkBool32 transformFeedbackRasterizationStreamSelect; + VkBool32 transformFeedbackDraw; +} VkPhysicalDeviceTransformFeedbackPropertiesEXT; + +typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationStateStreamCreateFlagsEXT flags; + uint32_t rasterizationStream; +} VkPipelineRasterizationStateStreamCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes); +typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, 
uint32_t vertexStride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT( + VkCommandBuffer commandBuffer, + uint32_t firstCounterBuffer, + uint32_t counterBufferCount, + const VkBuffer* pCounterBuffers, + const VkDeviceSize* pCounterBufferOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT( + VkCommandBuffer commandBuffer, + VkQueryPool queryPool, + uint32_t query, + uint32_t index); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT( + VkCommandBuffer commandBuffer, + uint32_t instanceCount, + uint32_t firstInstance, + VkBuffer counterBuffer, + VkDeviceSize counterBufferOffset, + uint32_t counterOffset, + uint32_t vertexStride); +#endif + + +// VK_NVX_binary_import is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_binary_import 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX) +#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1 +#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import" +typedef struct VkCuModuleCreateInfoNVX { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCuModuleCreateInfoNVX; + +typedef struct VkCuFunctionCreateInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuModuleNVX module; + const char* pName; +} VkCuFunctionCreateInfoNVX; + +typedef struct VkCuLaunchInfoNVX { + VkStructureType sType; + const void* pNext; + VkCuFunctionNVX function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCuLaunchInfoNVX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX( + VkDevice device, + const VkCuModuleCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCuModuleNVX* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX( + VkDevice device, + const VkCuFunctionCreateInfoNVX* pCreateInfo, + const VkAllocationCallbacks* 
pAllocator, + VkCuFunctionNVX* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX( + VkDevice device, + VkCuModuleNVX module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX( + VkDevice device, + VkCuFunctionNVX function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX( + VkCommandBuffer commandBuffer, + const VkCuLaunchInfoNVX* pLaunchInfo); +#endif + + +// VK_NVX_image_view_handle is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_image_view_handle 1 +#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2 +#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle" +typedef struct VkImageViewHandleInfoNVX { + VkStructureType sType; + const void* pNext; + VkImageView imageView; + VkDescriptorType descriptorType; + VkSampler sampler; +} VkImageViewHandleInfoNVX; + +typedef struct VkImageViewAddressPropertiesNVX { + VkStructureType sType; + void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; +} VkImageViewAddressPropertiesNVX; + +typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX( + VkDevice device, + const VkImageViewHandleInfoNVX* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX( + VkDevice device, + VkImageView imageView, + VkImageViewAddressPropertiesNVX* pProperties); +#endif + + +// VK_AMD_draw_indirect_count is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_draw_indirect_count 1 +#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2 +#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_AMD_negative_viewport_height is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_negative_viewport_height 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 +#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" + + +// VK_AMD_gpu_shader_half_float is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_half_float 1 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" + + +// VK_AMD_shader_ballot is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_shader_ballot 1 +#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 +#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" + + +// VK_AMD_texture_gather_bias_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_texture_gather_bias_lod 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 +#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" +typedef struct VkTextureLODGatherFormatPropertiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 supportsTextureGatherLODBiasAMD; +} VkTextureLODGatherFormatPropertiesAMD; + + + +// VK_AMD_shader_info is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_info 1 +#define VK_AMD_SHADER_INFO_SPEC_VERSION 1 +#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info" + +typedef enum VkShaderInfoTypeAMD { + VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0, + VK_SHADER_INFO_TYPE_BINARY_AMD = 1, + VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2, + VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderInfoTypeAMD; +typedef struct VkShaderResourceUsageAMD { + uint32_t numUsedVgprs; + uint32_t numUsedSgprs; + uint32_t ldsSizePerLocalWorkGroup; + size_t ldsUsageSizeInBytes; + size_t scratchMemUsageInBytes; +} VkShaderResourceUsageAMD; + +typedef struct VkShaderStatisticsInfoAMD { + VkShaderStageFlags shaderStageMask; + VkShaderResourceUsageAMD resourceUsage; + uint32_t numPhysicalVgprs; + uint32_t numPhysicalSgprs; + uint32_t numAvailableVgprs; + uint32_t numAvailableSgprs; + uint32_t computeWorkGroupSize[3]; +} VkShaderStatisticsInfoAMD; + +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD( + VkDevice device, + VkPipeline pipeline, + VkShaderStageFlagBits shaderStage, + VkShaderInfoTypeAMD infoType, + size_t* pInfoSize, + void* pInfo); +#endif + + +// VK_AMD_shader_image_load_store_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_image_load_store_lod 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1 +#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod" + + +// VK_NV_corner_sampled_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_corner_sampled_image 1 +#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2 +#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image" +typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cornerSampledImage; +} VkPhysicalDeviceCornerSampledImageFeaturesNV; + + + +// VK_IMG_format_pvrtc is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_format_pvrtc 1 +#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 +#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" + + +// VK_NV_external_memory_capabilities is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_external_memory_capabilities 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" + +typedef enum VkExternalMemoryHandleTypeFlagBitsNV { + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, + VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryHandleTypeFlagBitsNV; +typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; + +typedef enum VkExternalMemoryFeatureFlagBitsNV { + VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, + VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, + VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, + VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkExternalMemoryFeatureFlagBitsNV; +typedef VkFlags VkExternalMemoryFeatureFlagsNV; +typedef struct VkExternalImageFormatPropertiesNV { + VkImageFormatProperties imageFormatProperties; + VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; + VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; + VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; +} VkExternalImageFormatPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( + VkPhysicalDevice physicalDevice, + VkFormat format, + VkImageType type, + VkImageTiling tiling, + VkImageUsageFlags usage, + VkImageCreateFlags flags, + VkExternalMemoryHandleTypeFlagsNV externalHandleType, + VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties); +#endif + + +// VK_NV_external_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_external_memory 1 +#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" +typedef struct VkExternalMemoryImageCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExternalMemoryImageCreateInfoNV; + +typedef struct VkExportMemoryAllocateInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleTypes; +} VkExportMemoryAllocateInfoNV; + + + +// VK_EXT_validation_flags is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_validation_flags 1 +#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 3 +#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags" + +typedef enum VkValidationCheckEXT { + VK_VALIDATION_CHECK_ALL_EXT = 0, + VK_VALIDATION_CHECK_SHADERS_EXT = 1, + VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCheckEXT; +typedef struct VkValidationFlagsEXT { + VkStructureType sType; + const void* pNext; + uint32_t disabledValidationCheckCount; + const VkValidationCheckEXT* pDisabledValidationChecks; +} VkValidationFlagsEXT; + + + +// VK_EXT_shader_subgroup_ballot is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_subgroup_ballot 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" + + +// VK_EXT_shader_subgroup_vote is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_subgroup_vote 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 +#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" + + +// VK_EXT_texture_compression_astc_hdr is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texture_compression_astc_hdr 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1 +#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr" +typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT; + + + +// VK_EXT_astc_decode_mode is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_astc_decode_mode 1 +#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1 +#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode" +typedef struct VkImageViewASTCDecodeModeEXT { + VkStructureType sType; + const void* pNext; + VkFormat decodeMode; +} VkImageViewASTCDecodeModeEXT; + +typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 decodeModeSharedExponent; +} VkPhysicalDeviceASTCDecodeFeaturesEXT; + + + +// VK_EXT_pipeline_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_robustness 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness" + +typedef enum VkPipelineRobustnessBufferBehaviorEXT { + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3, + VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPipelineRobustnessBufferBehaviorEXT; + +typedef enum VkPipelineRobustnessImageBehaviorEXT { + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3, + VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPipelineRobustnessImageBehaviorEXT; +typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineRobustness; +} VkPhysicalDevicePipelineRobustnessFeaturesEXT; + +typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT { + VkStructureType sType; + void* pNext; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessStorageBuffers; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessUniformBuffers; + VkPipelineRobustnessBufferBehaviorEXT defaultRobustnessVertexInputs; + VkPipelineRobustnessImageBehaviorEXT defaultRobustnessImages; +} VkPhysicalDevicePipelineRobustnessPropertiesEXT; + +typedef struct VkPipelineRobustnessCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRobustnessBufferBehaviorEXT storageBuffers; + VkPipelineRobustnessBufferBehaviorEXT uniformBuffers; + VkPipelineRobustnessBufferBehaviorEXT vertexInputs; + VkPipelineRobustnessImageBehaviorEXT images; +} VkPipelineRobustnessCreateInfoEXT; + + + +// 
VK_EXT_conditional_rendering is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_conditional_rendering 1 +#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2 +#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering" + +typedef enum VkConditionalRenderingFlagBitsEXT { + VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001, + VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConditionalRenderingFlagBitsEXT; +typedef VkFlags VkConditionalRenderingFlagsEXT; +typedef struct VkConditionalRenderingBeginInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; + VkDeviceSize offset; + VkConditionalRenderingFlagsEXT flags; +} VkConditionalRenderingBeginInfoEXT; + +typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 conditionalRendering; + VkBool32 inheritedConditionalRendering; +} VkPhysicalDeviceConditionalRenderingFeaturesEXT; + +typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 conditionalRenderingEnable; +} VkCommandBufferInheritanceConditionalRenderingInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); +typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT( + VkCommandBuffer commandBuffer, + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT( + VkCommandBuffer commandBuffer); +#endif + + +// VK_NV_clip_space_w_scaling is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_clip_space_w_scaling 1 +#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 +#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" +typedef struct VkViewportWScalingNV { + float xcoeff; + float ycoeff; +} VkViewportWScalingNV; + +typedef struct VkPipelineViewportWScalingStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportWScalingEnable; + uint32_t viewportCount; + const VkViewportWScalingNV* pViewportWScalings; +} VkPipelineViewportWScalingStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportWScalingNV* pViewportWScalings); +#endif + + +// VK_EXT_direct_mode_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_direct_mode_display 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" +typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); +#endif + + +// VK_EXT_display_surface_counter is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_display_surface_counter 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" + +typedef enum VkSurfaceCounterFlagBitsEXT { + VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001, + // VK_SURFACE_COUNTER_VBLANK_EXT is a deprecated alias + VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT, + VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSurfaceCounterFlagBitsEXT; +typedef VkFlags VkSurfaceCounterFlagsEXT; +typedef struct VkSurfaceCapabilities2EXT { + VkStructureType sType; + void* pNext; + uint32_t minImageCount; + uint32_t maxImageCount; + VkExtent2D currentExtent; + VkExtent2D minImageExtent; + VkExtent2D maxImageExtent; + uint32_t maxImageArrayLayers; + VkSurfaceTransformFlagsKHR supportedTransforms; + VkSurfaceTransformFlagBitsKHR currentTransform; + VkCompositeAlphaFlagsKHR supportedCompositeAlpha; + VkImageUsageFlags supportedUsageFlags; + VkSurfaceCounterFlagsEXT supportedSurfaceCounters; +} VkSurfaceCapabilities2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( + VkPhysicalDevice physicalDevice, + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); +#endif + + +// VK_EXT_display_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_display_control 1 +#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" + +typedef enum VkDisplayPowerStateEXT { + VK_DISPLAY_POWER_STATE_OFF_EXT = 0, + VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, + VK_DISPLAY_POWER_STATE_ON_EXT = 2, + VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayPowerStateEXT; + +typedef enum VkDeviceEventTypeEXT { + VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, + VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceEventTypeEXT; + +typedef enum VkDisplayEventTypeEXT { + VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, + VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDisplayEventTypeEXT; +typedef struct VkDisplayPowerInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayPowerStateEXT powerState; +} VkDisplayPowerInfoEXT; + +typedef struct VkDeviceEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceEventTypeEXT deviceEvent; +} VkDeviceEventInfoEXT; + +typedef struct VkDisplayEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkDisplayEventTypeEXT displayEvent; +} VkDisplayEventInfoEXT; + +typedef struct VkSwapchainCounterCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkSurfaceCounterFlagsEXT surfaceCounters; +} VkSwapchainCounterCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence); +typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, 
VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( + VkDevice device, + const VkDeviceEventInfoEXT* pDeviceEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( + VkDevice device, + VkDisplayKHR display, + const VkDisplayEventInfoEXT* pDisplayEventInfo, + const VkAllocationCallbacks* pAllocator, + VkFence* pFence); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( + VkDevice device, + VkSwapchainKHR swapchain, + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); +#endif + + +// VK_GOOGLE_display_timing is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_display_timing 1 +#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 +#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" +typedef struct VkRefreshCycleDurationGOOGLE { + uint64_t refreshDuration; +} VkRefreshCycleDurationGOOGLE; + +typedef struct VkPastPresentationTimingGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; + uint64_t actualPresentTime; + uint64_t earliestPresentTime; + uint64_t presentMargin; +} VkPastPresentationTimingGOOGLE; + +typedef struct VkPresentTimeGOOGLE { + uint32_t presentID; + uint64_t desiredPresentTime; +} VkPresentTimeGOOGLE; + +typedef struct VkPresentTimesInfoGOOGLE { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentTimeGOOGLE* pTimes; +} VkPresentTimesInfoGOOGLE; + +typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( + VkDevice device, + VkSwapchainKHR swapchain, + uint32_t* pPresentationTimingCount, + VkPastPresentationTimingGOOGLE* pPresentationTimings); +#endif + + +// VK_NV_sample_mask_override_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_sample_mask_override_coverage 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 +#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" + + +// VK_NV_geometry_shader_passthrough is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_geometry_shader_passthrough 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 +#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" + + +// VK_NV_viewport_array2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_viewport_array2 1 +#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2" +// VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION +// VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME is a deprecated alias +#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME + + +// VK_NVX_multiview_per_view_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_NVX_multiview_per_view_attributes 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 +#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" +typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { + VkStructureType sType; + void* pNext; + VkBool32 perViewPositionAllComponents; +} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; + + + +// VK_NV_viewport_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_viewport_swizzle 1 +#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 +#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" + +typedef enum VkViewportCoordinateSwizzleNV { + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, + VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, + VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, + VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF +} VkViewportCoordinateSwizzleNV; +typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; +typedef struct VkViewportSwizzleNV { + VkViewportCoordinateSwizzleNV x; + VkViewportCoordinateSwizzleNV y; + VkViewportCoordinateSwizzleNV z; + VkViewportCoordinateSwizzleNV w; +} VkViewportSwizzleNV; + +typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineViewportSwizzleStateCreateFlagsNV flags; + uint32_t viewportCount; + const VkViewportSwizzleNV* pViewportSwizzles; +} VkPipelineViewportSwizzleStateCreateInfoNV; + + + +// VK_EXT_discard_rectangles is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_discard_rectangles 1 +#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 2 +#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" + +typedef enum VkDiscardRectangleModeEXT { + VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, + VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, + VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDiscardRectangleModeEXT; +typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxDiscardRectangles; +} VkPhysicalDeviceDiscardRectanglePropertiesEXT; + +typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineDiscardRectangleStateCreateFlagsEXT flags; + VkDiscardRectangleModeEXT discardRectangleMode; + uint32_t discardRectangleCount; + const VkRect2D* pDiscardRectangles; +} VkPipelineDiscardRectangleStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleModeEXT)(VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( + VkCommandBuffer commandBuffer, + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 discardRectangleEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleModeEXT( + VkCommandBuffer commandBuffer, + VkDiscardRectangleModeEXT discardRectangleMode); +#endif + + +// VK_EXT_conservative_rasterization is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_conservative_rasterization 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization" + +typedef enum VkConservativeRasterizationModeEXT { + VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0, + VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1, + VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2, + VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkConservativeRasterizationModeEXT; +typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT { + VkStructureType sType; + void* pNext; + float primitiveOverestimationSize; + float maxExtraPrimitiveOverestimationSize; + float extraPrimitiveOverestimationSizeGranularity; + VkBool32 primitiveUnderestimation; + VkBool32 conservativePointAndLineRasterization; + VkBool32 degenerateTrianglesRasterized; + VkBool32 degenerateLinesRasterized; + VkBool32 fullyCoveredFragmentShaderInputVariable; + VkBool32 conservativeRasterizationPostDepthCoverage; +} VkPhysicalDeviceConservativeRasterizationPropertiesEXT; + +typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationConservativeStateCreateFlagsEXT flags; + VkConservativeRasterizationModeEXT conservativeRasterizationMode; + float extraPrimitiveOverestimationSize; +} VkPipelineRasterizationConservativeStateCreateInfoEXT; + + + +// VK_EXT_depth_clip_enable is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clip_enable 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable" +typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT; +typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipEnable; +} VkPhysicalDeviceDepthClipEnableFeaturesEXT; + +typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipelineRasterizationDepthClipStateCreateFlagsEXT flags; + VkBool32 depthClipEnable; +} VkPipelineRasterizationDepthClipStateCreateInfoEXT; + + + +// VK_EXT_swapchain_colorspace is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_colorspace 1 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 5 +#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" + + +// VK_EXT_hdr_metadata is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_hdr_metadata 1 +#define VK_EXT_HDR_METADATA_SPEC_VERSION 3 +#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" +typedef struct VkXYColorEXT { + float x; + float y; +} VkXYColorEXT; + +typedef struct VkHdrMetadataEXT { + VkStructureType sType; + const void* pNext; + VkXYColorEXT displayPrimaryRed; + VkXYColorEXT displayPrimaryGreen; + VkXYColorEXT displayPrimaryBlue; + VkXYColorEXT whitePoint; + float maxLuminance; + float minLuminance; + float maxContentLightLevel; + float maxFrameAverageLightLevel; +} VkHdrMetadataEXT; + +typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( + VkDevice device, + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); +#endif + + +// VK_IMG_relaxed_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_IMG_relaxed_line_rasterization 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_IMG_RELAXED_LINE_RASTERIZATION_EXTENSION_NAME "VK_IMG_relaxed_line_rasterization" +typedef struct VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG { + VkStructureType sType; + void* pNext; + VkBool32 relaxedLineRasterization; +} VkPhysicalDeviceRelaxedLineRasterizationFeaturesIMG; + + + +// VK_EXT_external_memory_dma_buf is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_dma_buf 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf" + + +// VK_EXT_queue_family_foreign is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_queue_family_foreign 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1 +#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign" +#define VK_QUEUE_FAMILY_FOREIGN_EXT (~2U) + + +// VK_EXT_debug_utils is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_debug_utils 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) +#define VK_EXT_DEBUG_UTILS_SPEC_VERSION 2 +#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils" +typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT; + +typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000, + VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageSeverityFlagBitsEXT; + +typedef enum VkDebugUtilsMessageTypeFlagBitsEXT { + VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001, + VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002, + VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004, + VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008, + VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDebugUtilsMessageTypeFlagBitsEXT; +typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT; +typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT; +typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT; +typedef struct VkDebugUtilsLabelEXT { + VkStructureType sType; + const void* pNext; + const char* pLabelName; + float color[4]; +} VkDebugUtilsLabelEXT; + +typedef struct VkDebugUtilsObjectNameInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + const char* pObjectName; +} VkDebugUtilsObjectNameInfoEXT; + +typedef struct VkDebugUtilsMessengerCallbackDataEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCallbackDataFlagsEXT flags; + const char* pMessageIdName; + int32_t messageIdNumber; + const char* pMessage; + uint32_t queueLabelCount; + const VkDebugUtilsLabelEXT* pQueueLabels; + uint32_t cmdBufLabelCount; + const VkDebugUtilsLabelEXT* pCmdBufLabels; + uint32_t objectCount; + const VkDebugUtilsObjectNameInfoEXT* pObjects; +} VkDebugUtilsMessengerCallbackDataEXT; + +typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)( + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDebugUtilsMessengerCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDebugUtilsMessengerCreateFlagsEXT flags; + VkDebugUtilsMessageSeverityFlagsEXT messageSeverity; + VkDebugUtilsMessageTypeFlagsEXT messageType; + PFN_vkDebugUtilsMessengerCallbackEXT pfnUserCallback; + void* pUserData; +} VkDebugUtilsMessengerCreateInfoEXT; + +typedef struct VkDebugUtilsObjectTagInfoEXT { + VkStructureType sType; + const void* pNext; + VkObjectType objectType; + uint64_t objectHandle; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkDebugUtilsObjectTagInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo); +typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue); +typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef 
void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger); +typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT( + VkDevice device, + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT( + VkDevice device, + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); + +VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer, + const VkDebugUtilsLabelEXT* pLabelInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT( + VkInstance instance, + const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDebugUtilsMessengerEXT* pMessenger); + +VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT( + VkInstance instance, + VkDebugUtilsMessengerEXT messenger, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT( + VkInstance instance, + VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, + VkDebugUtilsMessageTypeFlagsEXT messageTypes, + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); +#endif + + +// VK_EXT_sampler_filter_minmax is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sampler_filter_minmax 1 +#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2 +#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" +typedef VkSamplerReductionMode VkSamplerReductionModeEXT; + +typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT; + +typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; + + + +// VK_AMD_gpu_shader_int16 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_gpu_shader_int16 1 +#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2 +#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" + + +// VK_AMD_mixed_attachment_samples is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_mixed_attachment_samples 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 +#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" + + +// VK_AMD_shader_fragment_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_fragment_mask 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 +#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" + + +// VK_EXT_inline_uniform_block is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_inline_uniform_block 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1 +#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block" +typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT; + +typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT; + +typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT; + +typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT; + + + +// VK_EXT_shader_stencil_export is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_stencil_export 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 +#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" + + +// VK_EXT_sample_locations is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_sample_locations 1 +#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 +#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" +typedef struct VkSampleLocationEXT { + float x; + float y; +} VkSampleLocationEXT; + +typedef struct VkSampleLocationsInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampleCountFlagBits sampleLocationsPerPixel; + VkExtent2D sampleLocationGridSize; + uint32_t sampleLocationsCount; + const VkSampleLocationEXT* pSampleLocations; +} VkSampleLocationsInfoEXT; + +typedef struct VkAttachmentSampleLocationsEXT { + uint32_t attachmentIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkAttachmentSampleLocationsEXT; + +typedef struct VkSubpassSampleLocationsEXT { + uint32_t subpassIndex; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkSubpassSampleLocationsEXT; + +typedef struct VkRenderPassSampleLocationsBeginInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentInitialSampleLocationsCount; + const VkAttachmentSampleLocationsEXT* pAttachmentInitialSampleLocations; + uint32_t postSubpassSampleLocationsCount; + const VkSubpassSampleLocationsEXT* pPostSubpassSampleLocations; +} VkRenderPassSampleLocationsBeginInfoEXT; + +typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 sampleLocationsEnable; + VkSampleLocationsInfoEXT sampleLocationsInfo; +} VkPipelineSampleLocationsStateCreateInfoEXT; + +typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { + VkStructureType sType; + void* pNext; + VkSampleCountFlags sampleLocationSampleCounts; + VkExtent2D maxSampleLocationGridSize; + float sampleLocationCoordinateRange[2]; + uint32_t sampleLocationSubPixelBits; + VkBool32 variableSampleLocations; +} VkPhysicalDeviceSampleLocationsPropertiesEXT; + +typedef struct VkMultisamplePropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D maxSampleLocationGridSize; +} VkMultisamplePropertiesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const 
VkSampleLocationsInfoEXT* pSampleLocationsInfo); +typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( + VkCommandBuffer commandBuffer, + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( + VkPhysicalDevice physicalDevice, + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); +#endif + + +// VK_EXT_blend_operation_advanced is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_blend_operation_advanced 1 +#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 +#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" + +typedef enum VkBlendOverlapEXT { + VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, + VK_BLEND_OVERLAP_DISJOINT_EXT = 1, + VK_BLEND_OVERLAP_CONJOINT_EXT = 2, + VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBlendOverlapEXT; +typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 advancedBlendCoherentOperations; +} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; + +typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t advancedBlendMaxColorAttachments; + VkBool32 advancedBlendIndependentBlend; + VkBool32 advancedBlendNonPremultipliedSrcColor; + VkBool32 advancedBlendNonPremultipliedDstColor; + VkBool32 advancedBlendCorrelatedOverlap; + VkBool32 advancedBlendAllOperations; +} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; + +typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; +} VkPipelineColorBlendAdvancedStateCreateInfoEXT; + + + +// VK_NV_fragment_coverage_to_color is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_coverage_to_color 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" +typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; +typedef struct VkPipelineCoverageToColorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageToColorStateCreateFlagsNV flags; + VkBool32 coverageToColorEnable; + uint32_t coverageToColorLocation; +} VkPipelineCoverageToColorStateCreateInfoNV; + + + +// VK_NV_framebuffer_mixed_samples is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_framebuffer_mixed_samples 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 +#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" + +typedef enum VkCoverageModulationModeNV { + VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, + VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, + VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, + VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, + VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageModulationModeNV; +typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; +typedef struct VkPipelineCoverageModulationStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageModulationStateCreateFlagsNV flags; + VkCoverageModulationModeNV coverageModulationMode; + VkBool32 coverageModulationTableEnable; + uint32_t coverageModulationTableCount; + const float* pCoverageModulationTable; +} VkPipelineCoverageModulationStateCreateInfoNV; + + + +// VK_NV_fill_rectangle is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fill_rectangle 1 +#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 +#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" + + +// VK_NV_shader_sm_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_sm_builtins 1 +#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1 +#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins" +typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderSMCount; + uint32_t shaderWarpsPerSM; +} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV; + +typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderSMBuiltins; +} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV; + + + +// VK_EXT_post_depth_coverage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_post_depth_coverage 1 +#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 +#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" + + +// VK_EXT_image_drm_format_modifier is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_drm_format_modifier 1 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2 +#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier" +typedef struct VkDrmFormatModifierPropertiesEXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags drmFormatModifierTilingFeatures; +} VkDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierPropertiesListEXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierPropertiesEXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesListEXT; + +typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + VkSharingMode sharingMode; + uint32_t queueFamilyIndexCount; + const uint32_t* pQueueFamilyIndices; +} VkPhysicalDeviceImageDrmFormatModifierInfoEXT; + +typedef struct VkImageDrmFormatModifierListCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t drmFormatModifierCount; + const uint64_t* pDrmFormatModifiers; +} VkImageDrmFormatModifierListCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + const VkSubresourceLayout* pPlaneLayouts; +} VkImageDrmFormatModifierExplicitCreateInfoEXT; + +typedef struct VkImageDrmFormatModifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint64_t drmFormatModifier; +} VkImageDrmFormatModifierPropertiesEXT; + +typedef struct VkDrmFormatModifierProperties2EXT { + uint64_t drmFormatModifier; + uint32_t drmFormatModifierPlaneCount; + VkFormatFeatureFlags2 drmFormatModifierTilingFeatures; +} VkDrmFormatModifierProperties2EXT; + +typedef struct VkDrmFormatModifierPropertiesList2EXT { + VkStructureType sType; + void* pNext; + uint32_t drmFormatModifierCount; + VkDrmFormatModifierProperties2EXT* pDrmFormatModifierProperties; +} VkDrmFormatModifierPropertiesList2EXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT( + VkDevice device, + VkImage image, + VkImageDrmFormatModifierPropertiesEXT* pProperties); +#endif + + +// VK_EXT_validation_cache is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_validation_cache 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) +#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 +#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" + +typedef enum VkValidationCacheHeaderVersionEXT { + VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, + VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationCacheHeaderVersionEXT; +typedef VkFlags VkValidationCacheCreateFlagsEXT; +typedef struct VkValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheCreateFlagsEXT flags; + size_t initialDataSize; + const void* pInitialData; +} VkValidationCacheCreateInfoEXT; + +typedef struct VkShaderModuleValidationCacheCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkValidationCacheEXT validationCache; +} VkShaderModuleValidationCacheCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache); +typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches); +typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( + VkDevice device, + const VkValidationCacheCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkValidationCacheEXT* pValidationCache); + +VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( + VkDevice device, + VkValidationCacheEXT dstCache, + uint32_t srcCacheCount, + const VkValidationCacheEXT* pSrcCaches); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); +#endif + + +// VK_EXT_descriptor_indexing is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_indexing 1 +#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2 +#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing" +typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT; + +typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT; + +typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT; + +typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT; + +typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT; + +typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT; + +typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT; + + + +// VK_EXT_shader_viewport_index_layer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_viewport_index_layer 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 +#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" + + +// VK_NV_shading_rate_image is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shading_rate_image 1 +#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3 +#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image" + +typedef enum VkShadingRatePaletteEntryNV { + VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0, + VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1, + VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2, + VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3, + VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10, + VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11, + VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF +} VkShadingRatePaletteEntryNV; + +typedef enum VkCoarseSampleOrderTypeNV { + VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0, + VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1, + VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2, + VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3, + VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoarseSampleOrderTypeNV; +typedef struct VkShadingRatePaletteNV { + uint32_t shadingRatePaletteEntryCount; + const VkShadingRatePaletteEntryNV* pShadingRatePaletteEntries; +} VkShadingRatePaletteNV; + +typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 shadingRateImageEnable; + uint32_t viewportCount; + const VkShadingRatePaletteNV* pShadingRatePalettes; +} VkPipelineViewportShadingRateImageStateCreateInfoNV; + +typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shadingRateImage; + VkBool32 shadingRateCoarseSampleOrder; +} VkPhysicalDeviceShadingRateImageFeaturesNV; + +typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV { + VkStructureType sType; + void* pNext; + VkExtent2D shadingRateTexelSize; + uint32_t shadingRatePaletteSize; + uint32_t shadingRateMaxCoarseSamples; +} VkPhysicalDeviceShadingRateImagePropertiesNV; + +typedef struct VkCoarseSampleLocationNV { + uint32_t pixelX; + uint32_t pixelY; + uint32_t sample; +} VkCoarseSampleLocationNV; + +typedef struct VkCoarseSampleOrderCustomNV { + VkShadingRatePaletteEntryNV shadingRate; + uint32_t sampleCount; + uint32_t sampleLocationCount; + const VkCoarseSampleLocationNV* pSampleLocations; +} VkCoarseSampleOrderCustomNV; + +typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCoarseSampleOrderTypeNV sampleOrderType; + uint32_t customSampleOrderCount; + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders; +} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, 
uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkShadingRatePaletteNV* pShadingRatePalettes); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV( + VkCommandBuffer commandBuffer, + VkCoarseSampleOrderTypeNV sampleOrderType, + uint32_t customSampleOrderCount, + const VkCoarseSampleOrderCustomNV* pCustomSampleOrders); +#endif + + +// VK_NV_ray_tracing is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_ray_tracing 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV) +#define VK_NV_RAY_TRACING_SPEC_VERSION 3 +#define VK_NV_RAY_TRACING_EXTENSION_NAME "VK_NV_ray_tracing" +#define VK_SHADER_UNUSED_KHR (~0U) +#define VK_SHADER_UNUSED_NV VK_SHADER_UNUSED_KHR + +typedef enum VkRayTracingShaderGroupTypeKHR { + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2, + VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR, + VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkRayTracingShaderGroupTypeKHR; +typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV; + + +typedef enum VkGeometryTypeKHR { + VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0, + VK_GEOMETRY_TYPE_AABBS_KHR = 1, + VK_GEOMETRY_TYPE_INSTANCES_KHR = 2, + VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR, + VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR, + VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryTypeKHR; +typedef VkGeometryTypeKHR VkGeometryTypeNV; + + +typedef enum VkAccelerationStructureTypeKHR { + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1, + VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2, + VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR, + VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureTypeKHR; +typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV; + + +typedef enum VkCopyAccelerationStructureModeKHR { + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1, + VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2, + VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3, + VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR, + VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} 
VkCopyAccelerationStructureModeKHR; +typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV; + + +typedef enum VkAccelerationStructureMemoryRequirementsTypeNV { + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2, + VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMemoryRequirementsTypeNV; + +typedef enum VkGeometryFlagBitsKHR { + VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002, + VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR, + VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR, + VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryFlagBitsKHR; +typedef VkFlags VkGeometryFlagsKHR; +typedef VkGeometryFlagsKHR VkGeometryFlagsNV; + +typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV; + + +typedef enum VkGeometryInstanceFlagBitsKHR { + VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001, + VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008, + VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010, + VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR, + VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR, + VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkGeometryInstanceFlagBitsKHR; +typedef VkFlags VkGeometryInstanceFlagsKHR; +typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV; + +typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV; + + +typedef enum VkBuildAccelerationStructureFlagBitsKHR { + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010, + VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISPLACEMENT_MICROMAP_UPDATE_NV = 0x00000200, +#endif + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DATA_ACCESS_KHR = 0x00000800, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = 
VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR, + VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureFlagBitsKHR; +typedef VkFlags VkBuildAccelerationStructureFlagsKHR; +typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV; + +typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV; + +typedef struct VkRayTracingShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; +} VkRayTracingShaderGroupCreateInfoNV; + +typedef struct VkRayTracingPipelineCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoNV* pGroups; + uint32_t maxRecursionDepth; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoNV; + +typedef struct VkGeometryTrianglesNV { + VkStructureType sType; + const void* pNext; + VkBuffer vertexData; + VkDeviceSize vertexOffset; + uint32_t vertexCount; + VkDeviceSize vertexStride; + VkFormat vertexFormat; + VkBuffer indexData; + VkDeviceSize indexOffset; + uint32_t indexCount; + VkIndexType indexType; + VkBuffer transformData; + VkDeviceSize transformOffset; +} VkGeometryTrianglesNV; + +typedef struct VkGeometryAABBNV { + VkStructureType sType; + const void* pNext; + VkBuffer aabbData; + uint32_t numAABBs; + uint32_t stride; + VkDeviceSize offset; +} VkGeometryAABBNV; + +typedef struct VkGeometryDataNV { + VkGeometryTrianglesNV triangles; + VkGeometryAABBNV aabbs; +} VkGeometryDataNV; + +typedef struct VkGeometryNV { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkGeometryDataNV geometry; + VkGeometryFlagsKHR flags; +} VkGeometryNV; + +typedef struct VkAccelerationStructureInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeNV type; + VkBuildAccelerationStructureFlagsNV flags; + uint32_t instanceCount; + uint32_t geometryCount; + const VkGeometryNV* pGeometries; +} VkAccelerationStructureInfoNV; + +typedef struct VkAccelerationStructureCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceSize compactedSize; + VkAccelerationStructureInfoNV info; +} VkAccelerationStructureCreateInfoNV; + +typedef struct VkBindAccelerationStructureMemoryInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureNV accelerationStructure; + VkDeviceMemory memory; + VkDeviceSize memoryOffset; + uint32_t deviceIndexCount; + const uint32_t* pDeviceIndices; +} VkBindAccelerationStructureMemoryInfoNV; + +typedef struct VkWriteDescriptorSetAccelerationStructureNV { + VkStructureType sType; + const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureNV* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureNV; + +typedef struct VkAccelerationStructureMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureMemoryRequirementsTypeNV type; + VkAccelerationStructureNV 
accelerationStructure; +} VkAccelerationStructureMemoryRequirementsInfoNV; + +typedef struct VkPhysicalDeviceRayTracingPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxTriangleCount; + uint32_t maxDescriptorSetAccelerationStructures; +} VkPhysicalDeviceRayTracingPropertiesNV; + +typedef struct VkTransformMatrixKHR { + float matrix[3][4]; +} VkTransformMatrixKHR; + +typedef VkTransformMatrixKHR VkTransformMatrixNV; + +typedef struct VkAabbPositionsKHR { + float minX; + float minY; + float minZ; + float maxX; + float maxY; + float maxZ; +} VkAabbPositionsKHR; + +typedef VkAabbPositionsKHR VkAabbPositionsNV; + +typedef struct VkAccelerationStructureInstanceKHR { + VkTransformMatrixKHR transform; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureInstanceKHR; + +typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements); +typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult 
(VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV( + VkDevice device, + const VkAccelerationStructureCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureNV* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV( + VkDevice device, + const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2KHR* pMemoryRequirements); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV( + VkDevice device, + uint32_t bindInfoCount, + const VkBindAccelerationStructureMemoryInfoNV* pBindInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV( + VkCommandBuffer commandBuffer, + const VkAccelerationStructureInfoNV* pInfo, + VkBuffer instanceData, + VkDeviceSize instanceOffset, + VkBool32 update, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkBuffer scratch, + VkDeviceSize scratchOffset); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV( + VkCommandBuffer commandBuffer, + VkAccelerationStructureNV dst, + VkAccelerationStructureNV src, + VkCopyAccelerationStructureModeKHR mode); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV( + VkCommandBuffer commandBuffer, + VkBuffer raygenShaderBindingTableBuffer, + VkDeviceSize raygenShaderBindingOffset, + VkBuffer missShaderBindingTableBuffer, + VkDeviceSize missShaderBindingOffset, + VkDeviceSize missShaderBindingStride, + VkBuffer hitShaderBindingTableBuffer, + VkDeviceSize hitShaderBindingOffset, + VkDeviceSize hitShaderBindingStride, + VkBuffer callableShaderBindingTableBuffer, + VkDeviceSize callableShaderBindingOffset, + VkDeviceSize callableShaderBindingStride, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV( + VkDevice device, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoNV* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV( + VkDevice device, + VkAccelerationStructureNV accelerationStructure, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdWriteAccelerationStructuresPropertiesNV( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureNV* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV( + VkDevice device, + VkPipeline pipeline, + uint32_t shader); +#endif + + +// VK_NV_representative_fragment_test is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_representative_fragment_test 1 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2 +#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test" +typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 representativeFragmentTest; +} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV; + +typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 representativeFragmentTestEnable; +} VkPipelineRepresentativeFragmentTestStateCreateInfoNV; + + + +// VK_EXT_filter_cubic is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_filter_cubic 1 +#define VK_EXT_FILTER_CUBIC_SPEC_VERSION 3 +#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic" +typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT { + VkStructureType sType; + void* pNext; + VkImageViewType imageViewType; +} VkPhysicalDeviceImageViewImageFormatInfoEXT; + +typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 filterCubic; + VkBool32 filterCubicMinmax; +} VkFilterCubicImageViewImageFormatPropertiesEXT; + + + +// VK_QCOM_render_pass_shader_resolve is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_shader_resolve 1 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve" + + +// VK_EXT_global_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority 1 +#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2 +#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority" +typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT; + +typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT; + + + +// VK_EXT_external_memory_host is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_external_memory_host 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host" +typedef struct VkImportMemoryHostPointerInfoEXT { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + void* pHostPointer; +} VkImportMemoryHostPointerInfoEXT; + +typedef struct VkMemoryHostPointerPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryHostPointerPropertiesEXT; + +typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minImportedHostPointerAlignment; +} VkPhysicalDeviceExternalMemoryHostPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + const void* pHostPointer, + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); +#endif + + +// VK_AMD_buffer_marker is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_buffer_marker 1 +#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1 +#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker" +typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD( + VkCommandBuffer commandBuffer, + VkPipelineStageFlagBits pipelineStage, + VkBuffer dstBuffer, + VkDeviceSize dstOffset, + uint32_t marker); +#endif + + +// VK_AMD_pipeline_compiler_control is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_pipeline_compiler_control 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1 +#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control" + +typedef enum VkPipelineCompilerControlFlagBitsAMD { + VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkPipelineCompilerControlFlagBitsAMD; +typedef VkFlags VkPipelineCompilerControlFlagsAMD; +typedef struct VkPipelineCompilerControlCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkPipelineCompilerControlFlagsAMD compilerControlFlags; +} VkPipelineCompilerControlCreateInfoAMD; + + + +// VK_EXT_calibrated_timestamps is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_calibrated_timestamps 1 +#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2 +#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps" +typedef VkTimeDomainKHR VkTimeDomainEXT; + +typedef VkCalibratedTimestampInfoKHR VkCalibratedTimestampInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); +typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT( + VkDevice device, + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); +#endif + + +// VK_AMD_shader_core_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2 +#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesAMD { + VkStructureType sType; + void* pNext; + uint32_t shaderEngineCount; + uint32_t shaderArraysPerEngineCount; + uint32_t computeUnitsPerShaderArray; + uint32_t simdPerComputeUnit; + uint32_t wavefrontsPerSimd; + uint32_t wavefrontSize; + uint32_t sgprsPerSimd; + uint32_t minSgprAllocation; + uint32_t maxSgprAllocation; + uint32_t sgprAllocationGranularity; + uint32_t vgprsPerSimd; + uint32_t minVgprAllocation; + uint32_t maxVgprAllocation; + uint32_t vgprAllocationGranularity; +} VkPhysicalDeviceShaderCorePropertiesAMD; + + + +// VK_AMD_memory_overallocation_behavior is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_memory_overallocation_behavior 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1 +#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior" + +typedef enum VkMemoryOverallocationBehaviorAMD { + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2, + VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF +} VkMemoryOverallocationBehaviorAMD; +typedef struct VkDeviceMemoryOverallocationCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkMemoryOverallocationBehaviorAMD overallocationBehavior; +} VkDeviceMemoryOverallocationCreateInfoAMD; + + + +// VK_EXT_vertex_attribute_divisor is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_vertex_attribute_divisor 1 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3 +#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor" +typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxVertexAttribDivisor; +} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT; + +typedef VkVertexInputBindingDivisorDescriptionKHR VkVertexInputBindingDivisorDescriptionEXT; + +typedef VkPipelineVertexInputDivisorStateCreateInfoKHR VkPipelineVertexInputDivisorStateCreateInfoEXT; + +typedef VkPhysicalDeviceVertexAttributeDivisorFeaturesKHR VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT; + + + +// VK_EXT_pipeline_creation_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_feedback 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback" +typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT; + +typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT; + +typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT; + +typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT; + + + +// VK_NV_shader_subgroup_partitioned is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_subgroup_partitioned 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1 +#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned" + + +// VK_NV_compute_shader_derivatives is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_compute_shader_derivatives 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1 +#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives" +typedef VkPhysicalDeviceComputeShaderDerivativesFeaturesKHR VkPhysicalDeviceComputeShaderDerivativesFeaturesNV; + + + +// VK_NV_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_mesh_shader 1 +#define VK_NV_MESH_SHADER_SPEC_VERSION 1 +#define VK_NV_MESH_SHADER_EXTENSION_NAME "VK_NV_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; +} VkPhysicalDeviceMeshShaderFeaturesNV; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxDrawMeshTasksCount; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskTotalMemorySize; + uint32_t maxTaskOutputCount; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshTotalMemorySize; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; +} VkPhysicalDeviceMeshShaderPropertiesNV; + +typedef struct VkDrawMeshTasksIndirectCommandNV { + uint32_t taskCount; + uint32_t firstTask; +} VkDrawMeshTasksIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV( + VkCommandBuffer commandBuffer, + uint32_t taskCount, + uint32_t firstTask); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + + +// VK_NV_fragment_shader_barycentric is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_fragment_shader_barycentric 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric" +typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV; + + + +// VK_NV_shader_image_footprint is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_image_footprint 1 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2 +#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint" +typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 imageFootprint; +} VkPhysicalDeviceShaderImageFootprintFeaturesNV; + + + +// VK_NV_scissor_exclusive is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_scissor_exclusive 1 +#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 2 +#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive" +typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t exclusiveScissorCount; + const VkRect2D* pExclusiveScissors; +} VkPipelineViewportExclusiveScissorStateCreateInfoNV; + +typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 exclusiveScissor; +} VkPhysicalDeviceExclusiveScissorFeaturesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorEnableNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkBool32* pExclusiveScissorEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorEnableNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkBool32* pExclusiveScissorEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV( + VkCommandBuffer commandBuffer, + uint32_t firstExclusiveScissor, + uint32_t exclusiveScissorCount, + const VkRect2D* pExclusiveScissors); +#endif + + +// VK_NV_device_diagnostic_checkpoints is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_diagnostic_checkpoints 1 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints" +typedef struct VkQueueFamilyCheckpointPropertiesNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlags checkpointExecutionStageMask; +} VkQueueFamilyCheckpointPropertiesNV; + +typedef struct VkCheckpointDataNV { + VkStructureType sType; + void* pNext; + VkPipelineStageFlagBits stage; + void* pCheckpointMarker; +} VkCheckpointDataNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker); +typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV( + VkCommandBuffer commandBuffer, + const void* pCheckpointMarker); + +VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV( + VkQueue queue, + uint32_t* pCheckpointDataCount, + VkCheckpointDataNV* pCheckpointData); +#endif + + +// VK_INTEL_shader_integer_functions2 is a preprocessor guard. Do not pass it to API calls. +#define VK_INTEL_shader_integer_functions2 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1 +#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2" +typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL { + VkStructureType sType; + void* pNext; + VkBool32 shaderIntegerFunctions2; +} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL; + + + +// VK_INTEL_performance_query is a preprocessor guard. Do not pass it to API calls. 
+#define VK_INTEL_performance_query 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL) +#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2 +#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query" + +typedef enum VkPerformanceConfigurationTypeINTEL { + VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0, + VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceConfigurationTypeINTEL; + +typedef enum VkQueryPoolSamplingModeINTEL { + VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0, + VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkQueryPoolSamplingModeINTEL; + +typedef enum VkPerformanceOverrideTypeINTEL { + VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0, + VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1, + VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceOverrideTypeINTEL; + +typedef enum VkPerformanceParameterTypeINTEL { + VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0, + VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1, + VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceParameterTypeINTEL; + +typedef enum VkPerformanceValueTypeINTEL { + VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0, + VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1, + VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2, + VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3, + VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4, + VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF +} VkPerformanceValueTypeINTEL; +typedef union VkPerformanceValueDataINTEL { + uint32_t value32; + uint64_t value64; + float valueFloat; + VkBool32 valueBool; + const char* valueString; +} VkPerformanceValueDataINTEL; + +typedef struct VkPerformanceValueINTEL { + VkPerformanceValueTypeINTEL type; + VkPerformanceValueDataINTEL data; +} VkPerformanceValueINTEL; + +typedef struct VkInitializePerformanceApiInfoINTEL { + VkStructureType sType; + const void* pNext; + void* pUserData; +} VkInitializePerformanceApiInfoINTEL; + +typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL { + VkStructureType sType; + const void* pNext; + VkQueryPoolSamplingModeINTEL performanceCountersSampling; +} VkQueryPoolPerformanceQueryCreateInfoINTEL; + +typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL; + +typedef struct VkPerformanceMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint64_t marker; +} VkPerformanceMarkerInfoINTEL; + +typedef struct VkPerformanceStreamMarkerInfoINTEL { + VkStructureType sType; + const void* pNext; + uint32_t marker; +} VkPerformanceStreamMarkerInfoINTEL; + +typedef struct VkPerformanceOverrideInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceOverrideTypeINTEL type; + VkBool32 enable; + uint64_t parameter; +} VkPerformanceOverrideInfoINTEL; + +typedef struct VkPerformanceConfigurationAcquireInfoINTEL { + VkStructureType sType; + const void* pNext; + VkPerformanceConfigurationTypeINTEL type; +} VkPerformanceConfigurationAcquireInfoINTEL; + +typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); +typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, 
const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo); +typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration); +typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration); +typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL( + VkDevice device, + const VkInitializePerformanceApiInfoINTEL* pInitializeInfo); + +VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL( + VkDevice device); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL( + VkCommandBuffer commandBuffer, + const VkPerformanceOverrideInfoINTEL* pOverrideInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL( + VkDevice device, + const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, + VkPerformanceConfigurationINTEL* pConfiguration); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL( + VkDevice device, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL( + VkQueue queue, + VkPerformanceConfigurationINTEL configuration); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL( + VkDevice device, + VkPerformanceParameterTypeINTEL parameter, + VkPerformanceValueINTEL* pValue); +#endif + + +// VK_EXT_pci_bus_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pci_bus_info 1 +#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION 2 +#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info" +typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t pciDomain; + uint32_t pciBus; + uint32_t pciDevice; + uint32_t pciFunction; +} VkPhysicalDevicePCIBusInfoPropertiesEXT; + + + +// VK_AMD_display_native_hdr is a preprocessor guard. Do not pass it to API calls. 
+#define VK_AMD_display_native_hdr 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1 +#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr" +typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD { + VkStructureType sType; + void* pNext; + VkBool32 localDimmingSupport; +} VkDisplayNativeHdrSurfaceCapabilitiesAMD; + +typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD { + VkStructureType sType; + const void* pNext; + VkBool32 localDimmingEnable; +} VkSwapchainDisplayNativeHdrCreateInfoAMD; + +typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD( + VkDevice device, + VkSwapchainKHR swapChain, + VkBool32 localDimmingEnable); +#endif + + +// VK_EXT_fragment_density_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2 +#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map" +typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMap; + VkBool32 fragmentDensityMapDynamic; + VkBool32 fragmentDensityMapNonSubsampledImages; +} VkPhysicalDeviceFragmentDensityMapFeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + VkExtent2D minFragmentDensityTexelSize; + VkExtent2D maxFragmentDensityTexelSize; + VkBool32 fragmentDensityInvocations; +} VkPhysicalDeviceFragmentDensityMapPropertiesEXT; + +typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkAttachmentReference fragmentDensityMapAttachment; +} VkRenderPassFragmentDensityMapCreateInfoEXT; + + + +// VK_EXT_scalar_block_layout is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_scalar_block_layout 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1 +#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout" +typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT; + + + +// VK_GOOGLE_hlsl_functionality1 is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_hlsl_functionality1 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1 +#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1" +// VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION +// VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME is a deprecated alias +#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME + + +// VK_GOOGLE_decorate_string is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_decorate_string 1 +#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1 +#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string" + + +// VK_EXT_subgroup_size_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_subgroup_size_control 1 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2 +#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control" +typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT; + +typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT; + + + +// VK_AMD_shader_core_properties2 is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_core_properties2 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1 +#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2" + +typedef enum VkShaderCorePropertiesFlagBitsAMD { + VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF +} VkShaderCorePropertiesFlagBitsAMD; +typedef VkFlags VkShaderCorePropertiesFlagsAMD; +typedef struct VkPhysicalDeviceShaderCoreProperties2AMD { + VkStructureType sType; + void* pNext; + VkShaderCorePropertiesFlagsAMD shaderCoreFeatures; + uint32_t activeComputeUnitCount; +} VkPhysicalDeviceShaderCoreProperties2AMD; + + + +// VK_AMD_device_coherent_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_device_coherent_memory 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1 +#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory" +typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 deviceCoherentMemory; +} VkPhysicalDeviceCoherentMemoryFeaturesAMD; + + + +// VK_EXT_shader_image_atomic_int64 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_image_atomic_int64 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1 +#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64" +typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderImageInt64Atomics; + VkBool32 sparseImageInt64Atomics; +} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT; + + + +// VK_EXT_memory_budget is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_budget 1 +#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1 +#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget" +typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize heapBudget[VK_MAX_MEMORY_HEAPS]; + VkDeviceSize heapUsage[VK_MAX_MEMORY_HEAPS]; +} VkPhysicalDeviceMemoryBudgetPropertiesEXT; + + + +// VK_EXT_memory_priority is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_memory_priority 1 +#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1 +#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority" +typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryPriority; +} VkPhysicalDeviceMemoryPriorityFeaturesEXT; + +typedef struct VkMemoryPriorityAllocateInfoEXT { + VkStructureType sType; + const void* pNext; + float priority; +} VkMemoryPriorityAllocateInfoEXT; + + + +// VK_NV_dedicated_allocation_image_aliasing is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_dedicated_allocation_image_aliasing 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1 +#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing" +typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 dedicatedAllocationImageAliasing; +} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV; + + + +// VK_EXT_buffer_device_address is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_buffer_device_address 1 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2 +#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address" +typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 bufferDeviceAddress; + VkBool32 bufferDeviceAddressCaptureReplay; + VkBool32 bufferDeviceAddressMultiDevice; +} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT; + +typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT; + +typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT; + +typedef struct VkBufferDeviceAddressCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; +} VkBufferDeviceAddressCreateInfoEXT; + +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT( + VkDevice device, + const VkBufferDeviceAddressInfo* pInfo); +#endif + + +// VK_EXT_tooling_info is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_tooling_info 1 +#define VK_EXT_TOOLING_INFO_SPEC_VERSION 1 +#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info" +typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT; + +typedef VkToolPurposeFlags VkToolPurposeFlagsEXT; + +typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT( + VkPhysicalDevice physicalDevice, + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); +#endif + + +// VK_EXT_separate_stencil_usage is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_separate_stencil_usage 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1 +#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage" +typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT; + + + +// VK_EXT_validation_features is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_validation_features 1 +#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 6 +#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features" + +typedef enum VkValidationFeatureEnableEXT { + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0, + VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1, + VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2, + VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3, + VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4, + VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureEnableEXT; + +typedef enum VkValidationFeatureDisableEXT { + VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0, + VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1, + VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2, + VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3, + VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4, + VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5, + VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6, + VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7, + VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkValidationFeatureDisableEXT; +typedef struct VkValidationFeaturesEXT { + VkStructureType sType; + const void* pNext; + uint32_t enabledValidationFeatureCount; + const VkValidationFeatureEnableEXT* pEnabledValidationFeatures; + uint32_t disabledValidationFeatureCount; + const VkValidationFeatureDisableEXT* pDisabledValidationFeatures; +} VkValidationFeaturesEXT; + + + +// VK_NV_cooperative_matrix is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cooperative_matrix 1 +#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1 +#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix" +typedef VkComponentTypeKHR VkComponentTypeNV; + +typedef VkScopeKHR VkScopeNV; + +typedef struct VkCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t MSize; + uint32_t NSize; + uint32_t KSize; + VkComponentTypeNV AType; + VkComponentTypeNV BType; + VkComponentTypeNV CType; + VkComponentTypeNV DType; + VkScopeNV scope; +} VkCooperativeMatrixPropertiesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cooperativeMatrix; + VkBool32 cooperativeMatrixRobustBufferAccess; +} VkPhysicalDeviceCooperativeMatrixFeaturesNV; + +typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV { + VkStructureType sType; + void* pNext; + VkShaderStageFlags cooperativeMatrixSupportedStages; +} VkPhysicalDeviceCooperativeMatrixPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( + VkPhysicalDevice physicalDevice, + uint32_t* pPropertyCount, + VkCooperativeMatrixPropertiesNV* pProperties); +#endif + + +// VK_NV_coverage_reduction_mode is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_coverage_reduction_mode 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1 +#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode" + +typedef enum VkCoverageReductionModeNV { + VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0, + VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1, + VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkCoverageReductionModeNV; +typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV; +typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 coverageReductionMode; +} VkPhysicalDeviceCoverageReductionModeFeaturesNV; + +typedef struct VkPipelineCoverageReductionStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineCoverageReductionStateCreateFlagsNV flags; + VkCoverageReductionModeNV coverageReductionMode; +} VkPipelineCoverageReductionStateCreateInfoNV; + +typedef struct VkFramebufferMixedSamplesCombinationNV { + VkStructureType sType; + void* pNext; + VkCoverageReductionModeNV coverageReductionMode; + VkSampleCountFlagBits rasterizationSamples; + VkSampleCountFlags depthStencilSamples; + VkSampleCountFlags colorSamples; +} VkFramebufferMixedSamplesCombinationNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( + VkPhysicalDevice physicalDevice, + uint32_t* pCombinationCount, + VkFramebufferMixedSamplesCombinationNV* pCombinations); +#endif + + +// VK_EXT_fragment_shader_interlock is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_shader_interlock 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock" +typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShaderSampleInterlock; + VkBool32 fragmentShaderPixelInterlock; + VkBool32 fragmentShaderShadingRateInterlock; +} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT; + + + +// VK_EXT_ycbcr_image_arrays is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_ycbcr_image_arrays 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays" +typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrImageArrays; +} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT; + + + +// VK_EXT_provoking_vertex is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_provoking_vertex 1 +#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1 +#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex" + +typedef enum VkProvokingVertexModeEXT { + VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0, + VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1, + VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkProvokingVertexModeEXT; +typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexLast; + VkBool32 transformFeedbackPreservesProvokingVertex; +} VkPhysicalDeviceProvokingVertexFeaturesEXT; + +typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 provokingVertexModePerPipeline; + VkBool32 transformFeedbackPreservesTriangleFanProvokingVertex; +} VkPhysicalDeviceProvokingVertexPropertiesEXT; + +typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkProvokingVertexModeEXT provokingVertexMode; +} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT; + + + +// VK_EXT_headless_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_headless_surface 1 +#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1 +#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface" +typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT; +typedef struct VkHeadlessSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkHeadlessSurfaceCreateFlagsEXT flags; +} VkHeadlessSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT( + VkInstance instance, + const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_line_rasterization is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_line_rasterization 1 +#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1 +#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization" +typedef VkLineRasterizationModeKHR VkLineRasterizationModeEXT; + +typedef VkPhysicalDeviceLineRasterizationFeaturesKHR VkPhysicalDeviceLineRasterizationFeaturesEXT; + +typedef VkPhysicalDeviceLineRasterizationPropertiesKHR VkPhysicalDeviceLineRasterizationPropertiesEXT; + +typedef VkPipelineRasterizationLineStateCreateInfoKHR VkPipelineRasterizationLineStateCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT( + VkCommandBuffer commandBuffer, + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); +#endif + + +// VK_EXT_shader_atomic_float is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_atomic_float 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float" +typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat32Atomics; + VkBool32 shaderBufferFloat32AtomicAdd; + VkBool32 shaderBufferFloat64Atomics; + VkBool32 shaderBufferFloat64AtomicAdd; + VkBool32 shaderSharedFloat32Atomics; + VkBool32 shaderSharedFloat32AtomicAdd; + VkBool32 shaderSharedFloat64Atomics; + VkBool32 shaderSharedFloat64AtomicAdd; + VkBool32 shaderImageFloat32Atomics; + VkBool32 shaderImageFloat32AtomicAdd; + VkBool32 sparseImageFloat32Atomics; + VkBool32 sparseImageFloat32AtomicAdd; +} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT; + + + +// VK_EXT_host_query_reset is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_query_reset 1 +#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1 +#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset" +typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT( + VkDevice device, + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); +#endif + + +// VK_EXT_index_type_uint8 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_index_type_uint8 1 +#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1 +#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8" +typedef VkPhysicalDeviceIndexTypeUint8FeaturesKHR VkPhysicalDeviceIndexTypeUint8FeaturesEXT; + + + +// VK_EXT_extended_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_extended_dynamic_state 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state" +typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState; +} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); +typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); +typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT( + VkCommandBuffer commandBuffer, + VkCullModeFlags cullMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT( + VkCommandBuffer commandBuffer, + VkFrontFace frontFace); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT( + VkCommandBuffer commandBuffer, + VkPrimitiveTopology primitiveTopology); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t viewportCount, + const VkViewport* pViewports); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT( + VkCommandBuffer commandBuffer, + uint32_t scissorCount, + const VkRect2D* pScissors); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT( + VkCommandBuffer commandBuffer, + uint32_t firstBinding, + uint32_t bindingCount, + const VkBuffer* pBuffers, + const VkDeviceSize* pOffsets, + const VkDeviceSize* pSizes, + const VkDeviceSize* pStrides); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthWriteEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT( + VkCommandBuffer commandBuffer, + VkCompareOp depthCompareOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBoundsTestEnable); + +VKAPI_ATTR void VKAPI_CALL 
vkCmdSetStencilTestEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stencilTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT( + VkCommandBuffer commandBuffer, + VkStencilFaceFlags faceMask, + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); +#endif + + +// VK_EXT_host_image_copy is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_host_image_copy 1 +#define VK_EXT_HOST_IMAGE_COPY_SPEC_VERSION 1 +#define VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME "VK_EXT_host_image_copy" + +typedef enum VkHostImageCopyFlagBitsEXT { + VK_HOST_IMAGE_COPY_MEMCPY_EXT = 0x00000001, + VK_HOST_IMAGE_COPY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkHostImageCopyFlagBitsEXT; +typedef VkFlags VkHostImageCopyFlagsEXT; +typedef struct VkPhysicalDeviceHostImageCopyFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hostImageCopy; +} VkPhysicalDeviceHostImageCopyFeaturesEXT; + +typedef struct VkPhysicalDeviceHostImageCopyPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t copySrcLayoutCount; + VkImageLayout* pCopySrcLayouts; + uint32_t copyDstLayoutCount; + VkImageLayout* pCopyDstLayouts; + uint8_t optimalTilingLayoutUUID[VK_UUID_SIZE]; + VkBool32 identicalMemoryTypeRequirements; +} VkPhysicalDeviceHostImageCopyPropertiesEXT; + +typedef struct VkMemoryToImageCopyEXT { + VkStructureType sType; + const void* pNext; + const void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkMemoryToImageCopyEXT; + +typedef struct VkImageToMemoryCopyEXT { + VkStructureType sType; + const void* pNext; + void* pHostPointer; + uint32_t memoryRowLength; + uint32_t memoryImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkImageToMemoryCopyEXT; + +typedef struct VkCopyMemoryToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkMemoryToImageCopyEXT* pRegions; +} VkCopyMemoryToImageInfoEXT; + +typedef struct VkCopyImageToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + uint32_t regionCount; + const VkImageToMemoryCopyEXT* pRegions; +} VkCopyImageToMemoryInfoEXT; + +typedef struct VkCopyImageToImageInfoEXT { + VkStructureType sType; + const void* pNext; + VkHostImageCopyFlagsEXT flags; + VkImage srcImage; + VkImageLayout srcImageLayout; + VkImage dstImage; + VkImageLayout dstImageLayout; + uint32_t regionCount; + const VkImageCopy2* pRegions; +} VkCopyImageToImageInfoEXT; + +typedef struct VkHostImageLayoutTransitionInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageLayout oldLayout; + VkImageLayout newLayout; + VkImageSubresourceRange subresourceRange; +} VkHostImageLayoutTransitionInfoEXT; + +typedef struct VkSubresourceHostMemcpySizeEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize size; +} VkSubresourceHostMemcpySizeEXT; + +typedef struct VkHostImageCopyDevicePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimalDeviceAccess; + VkBool32 identicalMemoryLayout; +} VkHostImageCopyDevicePerformanceQueryEXT; + +typedef VkSubresourceLayout2KHR VkSubresourceLayout2EXT; + +typedef VkImageSubresource2KHR VkImageSubresource2EXT; + +typedef VkResult (VKAPI_PTR 
*PFN_vkCopyMemoryToImageEXT)(VkDevice device, const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToMemoryEXT)(VkDevice device, const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyImageToImageEXT)(VkDevice device, const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); +typedef VkResult (VKAPI_PTR *PFN_vkTransitionImageLayoutEXT)(VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfoEXT* pTransitions); +typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2KHR* pSubresource, VkSubresourceLayout2KHR* pLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToImageEXT( + VkDevice device, + const VkCopyMemoryToImageInfoEXT* pCopyMemoryToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToMemoryEXT( + VkDevice device, + const VkCopyImageToMemoryInfoEXT* pCopyImageToMemoryInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyImageToImageEXT( + VkDevice device, + const VkCopyImageToImageInfoEXT* pCopyImageToImageInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkTransitionImageLayoutEXT( + VkDevice device, + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfoEXT* pTransitions); + +VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT( + VkDevice device, + VkImage image, + const VkImageSubresource2KHR* pSubresource, + VkSubresourceLayout2KHR* pLayout); +#endif + + +// VK_EXT_map_memory_placed is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_map_memory_placed 1 +#define VK_EXT_MAP_MEMORY_PLACED_SPEC_VERSION 1 +#define VK_EXT_MAP_MEMORY_PLACED_EXTENSION_NAME "VK_EXT_map_memory_placed" +typedef struct VkPhysicalDeviceMapMemoryPlacedFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 memoryMapPlaced; + VkBool32 memoryMapRangePlaced; + VkBool32 memoryUnmapReserve; +} VkPhysicalDeviceMapMemoryPlacedFeaturesEXT; + +typedef struct VkPhysicalDeviceMapMemoryPlacedPropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize minPlacedMemoryMapAlignment; +} VkPhysicalDeviceMapMemoryPlacedPropertiesEXT; + +typedef struct VkMemoryMapPlacedInfoEXT { + VkStructureType sType; + const void* pNext; + void* pPlacedAddress; +} VkMemoryMapPlacedInfoEXT; + + + +// VK_EXT_shader_atomic_float2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_atomic_float2 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1 +#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2" +typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderBufferFloat16Atomics; + VkBool32 shaderBufferFloat16AtomicAdd; + VkBool32 shaderBufferFloat16AtomicMinMax; + VkBool32 shaderBufferFloat32AtomicMinMax; + VkBool32 shaderBufferFloat64AtomicMinMax; + VkBool32 shaderSharedFloat16Atomics; + VkBool32 shaderSharedFloat16AtomicAdd; + VkBool32 shaderSharedFloat16AtomicMinMax; + VkBool32 shaderSharedFloat32AtomicMinMax; + VkBool32 shaderSharedFloat64AtomicMinMax; + VkBool32 shaderImageFloat32AtomicMinMax; + VkBool32 sparseImageFloat32AtomicMinMax; +} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT; + + + +// VK_EXT_surface_maintenance1 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_surface_maintenance1 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1" + +typedef enum VkPresentScalingFlagBitsEXT { + VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001, + VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002, + VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004, + VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentScalingFlagBitsEXT; +typedef VkFlags VkPresentScalingFlagsEXT; + +typedef enum VkPresentGravityFlagBitsEXT { + VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001, + VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002, + VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004, + VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkPresentGravityFlagBitsEXT; +typedef VkFlags VkPresentGravityFlagsEXT; +typedef struct VkSurfacePresentModeEXT { + VkStructureType sType; + void* pNext; + VkPresentModeKHR presentMode; +} VkSurfacePresentModeEXT; + +typedef struct VkSurfacePresentScalingCapabilitiesEXT { + VkStructureType sType; + void* pNext; + VkPresentScalingFlagsEXT supportedPresentScaling; + VkPresentGravityFlagsEXT supportedPresentGravityX; + VkPresentGravityFlagsEXT supportedPresentGravityY; + VkExtent2D minScaledImageExtent; + VkExtent2D maxScaledImageExtent; +} VkSurfacePresentScalingCapabilitiesEXT; + +typedef struct VkSurfacePresentModeCompatibilityEXT { + VkStructureType sType; + void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkSurfacePresentModeCompatibilityEXT; + + + +// VK_EXT_swapchain_maintenance1 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_swapchain_maintenance1 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1 +#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1" +typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 swapchainMaintenance1; +} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT; + +typedef struct VkSwapchainPresentFenceInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkFence* pFences; +} VkSwapchainPresentFenceInfoEXT; + +typedef struct VkSwapchainPresentModesCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModesCreateInfoEXT; + +typedef struct VkSwapchainPresentModeInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t swapchainCount; + const VkPresentModeKHR* pPresentModes; +} VkSwapchainPresentModeInfoEXT; + +typedef struct VkSwapchainPresentScalingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkPresentScalingFlagsEXT scalingBehavior; + VkPresentGravityFlagsEXT presentGravityX; + VkPresentGravityFlagsEXT presentGravityY; +} VkSwapchainPresentScalingCreateInfoEXT; + +typedef struct VkReleaseSwapchainImagesInfoEXT { + VkStructureType sType; + const void* pNext; + VkSwapchainKHR swapchain; + uint32_t imageIndexCount; + const uint32_t* pImageIndices; +} VkReleaseSwapchainImagesInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT( + VkDevice device, + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); +#endif + + +// VK_EXT_shader_demote_to_helper_invocation is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_demote_to_helper_invocation 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1 +#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation" +typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT; + + + +// VK_NV_device_generated_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV) +#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 +#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands" + +typedef enum VkIndirectCommandsTokenTypeNV { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NV = 1000428003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NV = 1000428004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeNV; + +typedef enum VkIndirectStateFlagBitsNV { + VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001, + VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectStateFlagBitsNV; +typedef VkFlags VkIndirectStateFlagsNV; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsNV; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t maxGraphicsShaderGroupCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsStreamCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsStreamStride; + uint32_t minSequencesCountBufferOffsetAlignment; + uint32_t minSequencesIndexBufferOffsetAlignment; + uint32_t minIndirectCommandsBufferOffsetAlignment; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV; + +typedef struct VkGraphicsShaderGroupCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + const VkPipelineVertexInputStateCreateInfo* pVertexInputState; + const VkPipelineTessellationStateCreateInfo* pTessellationState; +} VkGraphicsShaderGroupCreateInfoNV; + +typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t groupCount; + const VkGraphicsShaderGroupCreateInfoNV* pGroups; + uint32_t pipelineCount; + const VkPipeline* pPipelines; +} VkGraphicsPipelineShaderGroupsCreateInfoNV; + +typedef 
struct VkBindShaderGroupIndirectCommandNV { + uint32_t groupIndex; +} VkBindShaderGroupIndirectCommandNV; + +typedef struct VkBindIndexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandNV; + +typedef struct VkBindVertexBufferIndirectCommandNV { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandNV; + +typedef struct VkSetStateFlagsIndirectCommandNV { + uint32_t data; +} VkSetStateFlagsIndirectCommandNV; + +typedef struct VkIndirectCommandsStreamNV { + VkBuffer buffer; + VkDeviceSize offset; +} VkIndirectCommandsStreamNV; + +typedef struct VkIndirectCommandsLayoutTokenNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeNV tokenType; + uint32_t stream; + uint32_t offset; + uint32_t vertexBindingUnit; + VkBool32 vertexDynamicStride; + VkPipelineLayout pushconstantPipelineLayout; + VkShaderStageFlags pushconstantShaderStageFlags; + uint32_t pushconstantOffset; + uint32_t pushconstantSize; + VkIndirectStateFlagsNV indirectStateFlags; + uint32_t indexTypeCount; + const VkIndexType* pIndexTypes; + const uint32_t* pIndexTypeValues; +} VkIndirectCommandsLayoutTokenNV; + +typedef struct VkIndirectCommandsLayoutCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsNV flags; + VkPipelineBindPoint pipelineBindPoint; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenNV* pTokens; + uint32_t streamCount; + const uint32_t* pStreamStrides; +} VkIndirectCommandsLayoutCreateInfoNV; + +typedef struct VkGeneratedCommandsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t streamCount; + const VkIndirectCommandsStreamNV* pStreams; + uint32_t sequencesCount; + VkBuffer preprocessBuffer; + VkDeviceSize preprocessOffset; + VkDeviceSize preprocessSize; + VkBuffer sequencesCountBuffer; + VkDeviceSize sequencesCountOffset; + VkBuffer sequencesIndexBuffer; + VkDeviceSize sequencesIndexOffset; +} VkGeneratedCommandsInfoNV; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; + VkIndirectCommandsLayoutNV indirectCommandsLayout; + uint32_t maxSequencesCount; +} VkGeneratedCommandsMemoryRequirementsInfoNV; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); +typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, 
const VkAllocationCallbacks* pAllocator); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline, + uint32_t groupIndex); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutNV* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV( + VkDevice device, + VkIndirectCommandsLayoutNV indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); +#endif + + +// VK_NV_inherited_viewport_scissor is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_inherited_viewport_scissor 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1 +#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor" +typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 inheritedViewportScissor2D; +} VkPhysicalDeviceInheritedViewportScissorFeaturesNV; + +typedef struct VkCommandBufferInheritanceViewportScissorInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 viewportScissor2D; + uint32_t viewportDepthCount; + const VkViewport* pViewportDepths; +} VkCommandBufferInheritanceViewportScissorInfoNV; + + + +// VK_EXT_texel_buffer_alignment is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_texel_buffer_alignment 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1 +#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment" +typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 texelBufferAlignment; +} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT; + +typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT; + + + +// VK_QCOM_render_pass_transform is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_transform 1 +#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 4 +#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform" +typedef struct VkRenderPassTransformBeginInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkRenderPassTransformBeginInfoQCOM; + +typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM { + VkStructureType sType; + void* pNext; + VkSurfaceTransformFlagBitsKHR transform; + VkRect2D renderArea; +} VkCommandBufferInheritanceRenderPassTransformInfoQCOM; + + + +// VK_EXT_depth_bias_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_depth_bias_control 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_BIAS_CONTROL_EXTENSION_NAME "VK_EXT_depth_bias_control" + +typedef enum VkDepthBiasRepresentationEXT { + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORMAT_EXT = 0, + VK_DEPTH_BIAS_REPRESENTATION_LEAST_REPRESENTABLE_VALUE_FORCE_UNORM_EXT = 1, + VK_DEPTH_BIAS_REPRESENTATION_FLOAT_EXT = 2, + VK_DEPTH_BIAS_REPRESENTATION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthBiasRepresentationEXT; +typedef struct VkPhysicalDeviceDepthBiasControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthBiasControl; + VkBool32 leastRepresentableValueForceUnormRepresentation; + VkBool32 floatRepresentation; + VkBool32 depthBiasExact; +} VkPhysicalDeviceDepthBiasControlFeaturesEXT; + +typedef struct VkDepthBiasInfoEXT { + VkStructureType sType; + const void* pNext; + float depthBiasConstantFactor; + float depthBiasClamp; + float depthBiasSlopeFactor; +} VkDepthBiasInfoEXT; + +typedef struct VkDepthBiasRepresentationInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthBiasRepresentationEXT depthBiasRepresentation; + VkBool32 depthBiasExact; +} VkDepthBiasRepresentationInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias2EXT)(VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias2EXT( + VkCommandBuffer commandBuffer, + const VkDepthBiasInfoEXT* pDepthBiasInfo); +#endif + + +// VK_EXT_device_memory_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_memory_report 1 +#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report" + +typedef enum VkDeviceMemoryReportEventTypeEXT { + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4, + VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceMemoryReportEventTypeEXT; +typedef VkFlags VkDeviceMemoryReportFlagsEXT; +typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceMemoryReport; +} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT; + +typedef struct VkDeviceMemoryReportCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + VkDeviceMemoryReportEventTypeEXT type; + uint64_t memoryObjectId; + VkDeviceSize size; + VkObjectType objectType; + uint64_t objectHandle; + uint32_t heapIndex; +} VkDeviceMemoryReportCallbackDataEXT; + +typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)( + const VkDeviceMemoryReportCallbackDataEXT* pCallbackData, + void* pUserData); + +typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemoryReportFlagsEXT flags; + PFN_vkDeviceMemoryReportCallbackEXT pfnUserCallback; + void* pUserData; +} VkDeviceDeviceMemoryReportCreateInfoEXT; + + + +// VK_EXT_acquire_drm_display is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_acquire_drm_display 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT( + VkPhysicalDevice physicalDevice, + int32_t drmFd, + uint32_t connectorId, + VkDisplayKHR* display); +#endif + + +// VK_EXT_robustness2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_robustness2 1 +#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION 1 +#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2" +typedef struct VkPhysicalDeviceRobustness2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 robustBufferAccess2; + VkBool32 robustImageAccess2; + VkBool32 nullDescriptor; +} VkPhysicalDeviceRobustness2FeaturesEXT; + +typedef struct VkPhysicalDeviceRobustness2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkDeviceSize robustStorageBufferAccessSizeAlignment; + VkDeviceSize robustUniformBufferAccessSizeAlignment; +} VkPhysicalDeviceRobustness2PropertiesEXT; + + + +// VK_EXT_custom_border_color is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_custom_border_color 1 +#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12 +#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color" +typedef struct VkSamplerCustomBorderColorCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkClearColorValue customBorderColor; + VkFormat format; +} VkSamplerCustomBorderColorCreateInfoEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCustomBorderColorSamplers; +} VkPhysicalDeviceCustomBorderColorPropertiesEXT; + +typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 customBorderColors; + VkBool32 customBorderColorWithoutFormat; +} VkPhysicalDeviceCustomBorderColorFeaturesEXT; + + + +// VK_GOOGLE_user_type is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_user_type 1 +#define VK_GOOGLE_USER_TYPE_SPEC_VERSION 1 +#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type" + + +// VK_NV_present_barrier is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_present_barrier 1 +#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1 +#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier" +typedef struct VkPhysicalDevicePresentBarrierFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrier; +} VkPhysicalDevicePresentBarrierFeaturesNV; + +typedef struct VkSurfaceCapabilitiesPresentBarrierNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierSupported; +} VkSurfaceCapabilitiesPresentBarrierNV; + +typedef struct VkSwapchainPresentBarrierCreateInfoNV { + VkStructureType sType; + void* pNext; + VkBool32 presentBarrierEnable; +} VkSwapchainPresentBarrierCreateInfoNV; + + + +// VK_EXT_private_data is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_private_data 1 +typedef VkPrivateDataSlot VkPrivateDataSlotEXT; + +#define VK_EXT_PRIVATE_DATA_SPEC_VERSION 1 +#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data" +typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT; + +typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT; + +typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT; + +typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); +typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); +typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT( + VkDevice device, + const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkPrivateDataSlot* pPrivateDataSlot); + +VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT( + VkDevice device, + VkPrivateDataSlot privateDataSlot, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); + +VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT( + VkDevice device, + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t* pData); +#endif + + +// VK_EXT_pipeline_creation_cache_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_creation_cache_control 1 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3 +#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control" +typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT; + + + +// VK_NV_device_diagnostics_config is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_diagnostics_config 1 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2 +#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config" + +typedef enum VkDeviceDiagnosticsConfigFlagBitsNV { + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004, + VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008, + VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkDeviceDiagnosticsConfigFlagBitsNV; +typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV; +typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 diagnosticsConfig; +} VkPhysicalDeviceDiagnosticsConfigFeaturesNV; + +typedef struct VkDeviceDiagnosticsConfigCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceDiagnosticsConfigFlagsNV flags; +} VkDeviceDiagnosticsConfigCreateInfoNV; + + + +// VK_QCOM_render_pass_store_ops is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_render_pass_store_ops 1 +#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2 +#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops" + + +// VK_NV_cuda_kernel_launch is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_cuda_kernel_launch 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaModuleNV) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCudaFunctionNV) +#define VK_NV_CUDA_KERNEL_LAUNCH_SPEC_VERSION 2 +#define VK_NV_CUDA_KERNEL_LAUNCH_EXTENSION_NAME "VK_NV_cuda_kernel_launch" +typedef struct VkCudaModuleCreateInfoNV { + VkStructureType sType; + const void* pNext; + size_t dataSize; + const void* pData; +} VkCudaModuleCreateInfoNV; + +typedef struct VkCudaFunctionCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaModuleNV module; + const char* pName; +} VkCudaFunctionCreateInfoNV; + +typedef struct VkCudaLaunchInfoNV { + VkStructureType sType; + const void* pNext; + VkCudaFunctionNV function; + uint32_t gridDimX; + uint32_t gridDimY; + uint32_t gridDimZ; + uint32_t blockDimX; + uint32_t blockDimY; + uint32_t blockDimZ; + uint32_t sharedMemBytes; + size_t paramCount; + const void* const * pParams; + size_t extraCount; + const void* const * pExtras; +} VkCudaLaunchInfoNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 cudaKernelLaunchFeatures; +} VkPhysicalDeviceCudaKernelLaunchFeaturesNV; + +typedef struct VkPhysicalDeviceCudaKernelLaunchPropertiesNV { + VkStructureType sType; + void* pNext; + uint32_t computeCapabilityMinor; + uint32_t computeCapabilityMajor; +} VkPhysicalDeviceCudaKernelLaunchPropertiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaModuleNV)(VkDevice device, const VkCudaModuleCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaModuleNV* pModule); +typedef VkResult (VKAPI_PTR *PFN_vkGetCudaModuleCacheNV)(VkDevice device, VkCudaModuleNV module, size_t* pCacheSize, void* pCacheData); +typedef VkResult (VKAPI_PTR *PFN_vkCreateCudaFunctionNV)(VkDevice device, const VkCudaFunctionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCudaFunctionNV* pFunction); +typedef void (VKAPI_PTR *PFN_vkDestroyCudaModuleNV)(VkDevice device, VkCudaModuleNV module, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR 
*PFN_vkDestroyCudaFunctionNV)(VkDevice device, VkCudaFunctionNV function, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdCudaLaunchKernelNV)(VkCommandBuffer commandBuffer, const VkCudaLaunchInfoNV* pLaunchInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaModuleNV( + VkDevice device, + const VkCudaModuleCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaModuleNV* pModule); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetCudaModuleCacheNV( + VkDevice device, + VkCudaModuleNV module, + size_t* pCacheSize, + void* pCacheData); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateCudaFunctionNV( + VkDevice device, + const VkCudaFunctionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkCudaFunctionNV* pFunction); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaModuleNV( + VkDevice device, + VkCudaModuleNV module, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkDestroyCudaFunctionNV( + VkDevice device, + VkCudaFunctionNV function, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdCudaLaunchKernelNV( + VkCommandBuffer commandBuffer, + const VkCudaLaunchInfoNV* pLaunchInfo); +#endif + + +// VK_NV_low_latency is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_low_latency 1 +#define VK_NV_LOW_LATENCY_SPEC_VERSION 1 +#define VK_NV_LOW_LATENCY_EXTENSION_NAME "VK_NV_low_latency" +typedef struct VkQueryLowLatencySupportNV { + VkStructureType sType; + const void* pNext; + void* pQueriedLowLatencyData; +} VkQueryLowLatencySupportNV; + + + +// VK_EXT_descriptor_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_descriptor_buffer 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR) +#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1 +#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer" +typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 combinedImageSamplerDescriptorSingleArray; + VkBool32 bufferlessPushDescriptors; + VkBool32 allowSamplerImageViewPostSubmitCreation; + VkDeviceSize descriptorBufferOffsetAlignment; + uint32_t maxDescriptorBufferBindings; + uint32_t maxResourceDescriptorBufferBindings; + uint32_t maxSamplerDescriptorBufferBindings; + uint32_t maxEmbeddedImmutableSamplerBindings; + uint32_t maxEmbeddedImmutableSamplers; + size_t bufferCaptureReplayDescriptorDataSize; + size_t imageCaptureReplayDescriptorDataSize; + size_t imageViewCaptureReplayDescriptorDataSize; + size_t samplerCaptureReplayDescriptorDataSize; + size_t accelerationStructureCaptureReplayDescriptorDataSize; + size_t samplerDescriptorSize; + size_t combinedImageSamplerDescriptorSize; + size_t sampledImageDescriptorSize; + size_t storageImageDescriptorSize; + size_t uniformTexelBufferDescriptorSize; + size_t robustUniformTexelBufferDescriptorSize; + size_t storageTexelBufferDescriptorSize; + size_t robustStorageTexelBufferDescriptorSize; + size_t uniformBufferDescriptorSize; + size_t robustUniformBufferDescriptorSize; + size_t storageBufferDescriptorSize; + size_t robustStorageBufferDescriptorSize; + size_t inputAttachmentDescriptorSize; + size_t accelerationStructureDescriptorSize; + VkDeviceSize maxSamplerDescriptorBufferRange; + VkDeviceSize maxResourceDescriptorBufferRange; + VkDeviceSize samplerDescriptorBufferAddressSpaceSize; + VkDeviceSize resourceDescriptorBufferAddressSpaceSize; + VkDeviceSize descriptorBufferAddressSpaceSize; +} 
VkPhysicalDeviceDescriptorBufferPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT { + VkStructureType sType; + void* pNext; + size_t combinedImageSamplerDensityMapDescriptorSize; +} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT; + +typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 descriptorBuffer; + VkBool32 descriptorBufferCaptureReplay; + VkBool32 descriptorBufferImageLayoutIgnored; + VkBool32 descriptorBufferPushDescriptors; +} VkPhysicalDeviceDescriptorBufferFeaturesEXT; + +typedef struct VkDescriptorAddressInfoEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddress address; + VkDeviceSize range; + VkFormat format; +} VkDescriptorAddressInfoEXT; + +typedef struct VkDescriptorBufferBindingInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceAddress address; + VkBufferUsageFlags usage; +} VkDescriptorBufferBindingInfoEXT; + +typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT; + +typedef union VkDescriptorDataEXT { + const VkSampler* pSampler; + const VkDescriptorImageInfo* pCombinedImageSampler; + const VkDescriptorImageInfo* pInputAttachmentImage; + const VkDescriptorImageInfo* pSampledImage; + const VkDescriptorImageInfo* pStorageImage; + const VkDescriptorAddressInfoEXT* pUniformTexelBuffer; + const VkDescriptorAddressInfoEXT* pStorageTexelBuffer; + const VkDescriptorAddressInfoEXT* pUniformBuffer; + const VkDescriptorAddressInfoEXT* pStorageBuffer; + VkDeviceAddress accelerationStructure; +} VkDescriptorDataEXT; + +typedef struct VkDescriptorGetInfoEXT { + VkStructureType sType; + const void* pNext; + VkDescriptorType type; + VkDescriptorDataEXT data; +} VkDescriptorGetInfoEXT; + +typedef struct VkBufferCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkBuffer buffer; +} VkBufferCaptureDescriptorDataInfoEXT; + +typedef struct VkImageCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; +} VkImageCaptureDescriptorDataInfoEXT; + +typedef struct VkImageViewCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageView imageView; +} VkImageViewCaptureDescriptorDataInfoEXT; + +typedef struct VkSamplerCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkSampler sampler; +} VkSamplerCaptureDescriptorDataInfoEXT; + +typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT { + VkStructureType sType; + const void* pNext; + const void* opaqueCaptureDescriptorData; +} VkOpaqueCaptureDescriptorDataCreateInfoEXT; + +typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; + VkAccelerationStructureNV accelerationStructureNV; +} VkAccelerationStructureCaptureDescriptorDataInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor); +typedef void (VKAPI_PTR 
*PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos); +typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets); +typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData); +typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT( + VkDevice device, + VkDescriptorSetLayout layout, + VkDeviceSize* pLayoutSizeInBytes); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT( + VkDevice device, + VkDescriptorSetLayout layout, + uint32_t binding, + VkDeviceSize* pOffset); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT( + VkDevice device, + const VkDescriptorGetInfoEXT* pDescriptorInfo, + size_t dataSize, + void* pDescriptor); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT( + VkCommandBuffer commandBuffer, + uint32_t bufferCount, + const VkDescriptorBufferBindingInfoEXT* pBindingInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t firstSet, + uint32_t setCount, + const uint32_t* pBufferIndices, + const VkDeviceSize* pOffsets); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipelineLayout layout, + uint32_t set); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( + VkDevice device, + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, + void* pData); +#endif + + +// VK_EXT_graphics_pipeline_library is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_graphics_pipeline_library 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1 +#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library" + +typedef enum VkGraphicsPipelineLibraryFlagBitsEXT { + VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001, + VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004, + VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008, + VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkGraphicsPipelineLibraryFlagBitsEXT; +typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT; +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibrary; +} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT; + +typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 graphicsPipelineLibraryFastLinking; + VkBool32 graphicsPipelineLibraryIndependentInterpolationDecoration; +} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT; + +typedef struct VkGraphicsPipelineLibraryCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkGraphicsPipelineLibraryFlagsEXT flags; +} VkGraphicsPipelineLibraryCreateInfoEXT; + + + +// VK_AMD_shader_early_and_late_fragment_tests is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_shader_early_and_late_fragment_tests 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1 +#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests" +typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 shaderEarlyAndLateFragmentTests; +} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD; + + + +// VK_NV_fragment_shading_rate_enums is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_fragment_shading_rate_enums 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1 +#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums" + +typedef enum VkFragmentShadingRateTypeNV { + VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0, + VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1, + VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateTypeNV; + +typedef enum VkFragmentShadingRateNV { + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9, + VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10, + VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11, + VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12, + VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13, + VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14, + VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15, + VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF +} VkFragmentShadingRateNV; +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 fragmentShadingRateEnums; + VkBool32 supersampleFragmentShadingRates; + VkBool32 noInvocationFragmentShadingRates; +} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV; + +typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV { + VkStructureType sType; + void* pNext; + VkSampleCountFlagBits maxFragmentShadingRateInvocationCount; +} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV; + +typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkFragmentShadingRateTypeNV shadingRateType; + VkFragmentShadingRateNV shadingRate; + VkFragmentShadingRateCombinerOpKHR combinerOps[2]; +} VkPipelineFragmentShadingRateEnumStateCreateInfoNV; + +typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV( + VkCommandBuffer commandBuffer, + VkFragmentShadingRateNV shadingRate, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); +#endif + + +// VK_NV_ray_tracing_motion_blur is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_motion_blur 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur" + +typedef enum VkAccelerationStructureMotionInstanceTypeNV { + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2, + VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkAccelerationStructureMotionInstanceTypeNV; +typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV; +typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV; +typedef union VkDeviceOrHostAddressConstKHR { + VkDeviceAddress deviceAddress; + const void* hostAddress; +} VkDeviceOrHostAddressConstKHR; + +typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR vertexData; +} VkAccelerationStructureGeometryMotionTrianglesDataNV; + +typedef struct VkAccelerationStructureMotionInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t maxInstances; + VkAccelerationStructureMotionInfoFlagsNV flags; +} VkAccelerationStructureMotionInfoNV; + +typedef struct VkAccelerationStructureMatrixMotionInstanceNV { + VkTransformMatrixKHR transformT0; + VkTransformMatrixKHR transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureMatrixMotionInstanceNV; + +typedef struct VkSRTDataNV { + float sx; + float a; + float b; + float pvx; + float sy; + float c; + float pvy; + float sz; + float pvz; + float qx; + float qy; + float qz; + float qw; + float tx; + float ty; + float tz; +} VkSRTDataNV; + +typedef struct VkAccelerationStructureSRTMotionInstanceNV { + VkSRTDataNV transformT0; + VkSRTDataNV transformT1; + uint32_t instanceCustomIndex:24; + uint32_t mask:8; + uint32_t instanceShaderBindingTableRecordOffset:24; + VkGeometryInstanceFlagsKHR flags:8; + uint64_t accelerationStructureReference; +} VkAccelerationStructureSRTMotionInstanceNV; + +typedef union VkAccelerationStructureMotionInstanceDataNV { + VkAccelerationStructureInstanceKHR staticInstance; + VkAccelerationStructureMatrixMotionInstanceNV matrixMotionInstance; + VkAccelerationStructureSRTMotionInstanceNV srtMotionInstance; +} VkAccelerationStructureMotionInstanceDataNV; + +typedef struct VkAccelerationStructureMotionInstanceNV { + VkAccelerationStructureMotionInstanceTypeNV type; + VkAccelerationStructureMotionInstanceFlagsNV flags; + VkAccelerationStructureMotionInstanceDataNV data; +} VkAccelerationStructureMotionInstanceNV; + +typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingMotionBlur; + VkBool32 rayTracingMotionBlurPipelineTraceRaysIndirect; +} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV; + + + +// VK_EXT_ycbcr_2plane_444_formats is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_ycbcr_2plane_444_formats 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats" +typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 ycbcr2plane444Formats; +} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT; + + + +// VK_EXT_fragment_density_map2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_fragment_density_map2 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1 +#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2" +typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapDeferred; +} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT; + +typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subsampledLoads; + VkBool32 subsampledCoarseReconstructionEarlyAccess; + uint32_t maxSubsampledArrayLayers; + uint32_t maxDescriptorSetSubsampledSamplers; +} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT; + + + +// VK_QCOM_rotated_copy_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_rotated_copy_commands 1 +#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 2 +#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands" +typedef struct VkCopyCommandTransformInfoQCOM { + VkStructureType sType; + const void* pNext; + VkSurfaceTransformFlagBitsKHR transform; +} VkCopyCommandTransformInfoQCOM; + + + +// VK_EXT_image_robustness is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_robustness 1 +#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1 +#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness" +typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT; + + + +// VK_EXT_image_compression_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_image_compression_control 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control" + +typedef enum VkImageCompressionFlagBitsEXT { + VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFlagBitsEXT; +typedef VkFlags VkImageCompressionFlagsEXT; + +typedef enum VkImageCompressionFixedRateFlagBitsEXT { + VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0, + VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001, + VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002, + VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004, + VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008, + VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010, + VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020, + VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040, + VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080, + VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100, + VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200, + VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400, + VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800, + VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000, + VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000, + VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000, + VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000, + VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000, + VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000, + VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000, + VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000, + VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000, + VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000, + VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000, + VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000, + VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkImageCompressionFixedRateFlagBitsEXT; +typedef VkFlags VkImageCompressionFixedRateFlagsEXT; +typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControl; +} VkPhysicalDeviceImageCompressionControlFeaturesEXT; + +typedef struct VkImageCompressionControlEXT { + VkStructureType sType; + const void* pNext; + VkImageCompressionFlagsEXT flags; + uint32_t compressionControlPlaneCount; + VkImageCompressionFixedRateFlagsEXT* pFixedRateFlags; +} VkImageCompressionControlEXT; + +typedef struct VkImageCompressionPropertiesEXT { + VkStructureType sType; + void* pNext; + VkImageCompressionFlagsEXT imageCompressionFlags; + VkImageCompressionFixedRateFlagsEXT imageCompressionFixedRateFlags; +} VkImageCompressionPropertiesEXT; + + + +// VK_EXT_attachment_feedback_loop_layout is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_attachment_feedback_loop_layout 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopLayout; +} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT; + + + +// VK_EXT_4444_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_4444_formats 1 +#define VK_EXT_4444_FORMATS_SPEC_VERSION 1 +#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats" +typedef struct VkPhysicalDevice4444FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatA4R4G4B4; + VkBool32 formatA4B4G4R4; +} VkPhysicalDevice4444FormatsFeaturesEXT; + + + +// VK_EXT_device_fault is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_fault 1 +#define VK_EXT_DEVICE_FAULT_SPEC_VERSION 2 +#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault" + +typedef enum VkDeviceFaultAddressTypeEXT { + VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0, + VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1, + VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2, + VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5, + VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6, + VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultAddressTypeEXT; + +typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT { + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1, + VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceFaultVendorBinaryHeaderVersionEXT; +typedef struct VkPhysicalDeviceFaultFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceFault; + VkBool32 deviceFaultVendorBinary; +} VkPhysicalDeviceFaultFeaturesEXT; + +typedef struct VkDeviceFaultCountsEXT { + VkStructureType sType; + void* pNext; + uint32_t addressInfoCount; + uint32_t vendorInfoCount; + VkDeviceSize vendorBinarySize; +} VkDeviceFaultCountsEXT; + +typedef struct VkDeviceFaultAddressInfoEXT { + VkDeviceFaultAddressTypeEXT addressType; + VkDeviceAddress reportedAddress; + VkDeviceSize addressPrecision; +} VkDeviceFaultAddressInfoEXT; + +typedef struct VkDeviceFaultVendorInfoEXT { + char description[VK_MAX_DESCRIPTION_SIZE]; + uint64_t vendorFaultCode; + uint64_t vendorFaultData; +} VkDeviceFaultVendorInfoEXT; + +typedef struct VkDeviceFaultInfoEXT { + VkStructureType sType; + void* pNext; + char description[VK_MAX_DESCRIPTION_SIZE]; + VkDeviceFaultAddressInfoEXT* pAddressInfos; + VkDeviceFaultVendorInfoEXT* pVendorInfos; + void* pVendorBinaryData; +} VkDeviceFaultInfoEXT; + +typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT { + uint32_t headerSize; + VkDeviceFaultVendorBinaryHeaderVersionEXT headerVersion; + uint32_t vendorID; + uint32_t deviceID; + uint32_t driverVersion; + uint8_t pipelineCacheUUID[VK_UUID_SIZE]; + uint32_t applicationNameOffset; + uint32_t applicationVersion; + uint32_t engineNameOffset; + uint32_t engineVersion; + uint32_t apiVersion; +} VkDeviceFaultVendorBinaryHeaderVersionOneEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL 
vkGetDeviceFaultInfoEXT( + VkDevice device, + VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo); +#endif + + +// VK_ARM_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_rasterization_order_attachment_access 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access" +typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 rasterizationOrderColorAttachmentAccess; + VkBool32 rasterizationOrderDepthAttachmentAccess; + VkBool32 rasterizationOrderStencilAttachmentAccess; +} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT; + +typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM; + + + +// VK_EXT_rgba10x6_formats is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rgba10x6_formats 1 +#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1 +#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats" +typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 formatRgba10x6WithoutYCbCrSampler; +} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT; + + + +// VK_VALVE_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_VALVE_mutable_descriptor_type 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type" +typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 mutableDescriptorType; +} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT; + +typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE; + +typedef struct VkMutableDescriptorTypeListEXT { + uint32_t descriptorTypeCount; + const VkDescriptorType* pDescriptorTypes; +} VkMutableDescriptorTypeListEXT; + +typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE; + +typedef struct VkMutableDescriptorTypeCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t mutableDescriptorTypeListCount; + const VkMutableDescriptorTypeListEXT* pMutableDescriptorTypeLists; +} VkMutableDescriptorTypeCreateInfoEXT; + +typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE; + + + +// VK_EXT_vertex_input_dynamic_state is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_vertex_input_dynamic_state 1 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2 +#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state" +typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 vertexInputDynamicState; +} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT; + +typedef struct VkVertexInputBindingDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t binding; + uint32_t stride; + VkVertexInputRate inputRate; + uint32_t divisor; +} VkVertexInputBindingDescription2EXT; + +typedef struct VkVertexInputAttributeDescription2EXT { + VkStructureType sType; + void* pNext; + uint32_t location; + uint32_t binding; + VkFormat format; + uint32_t offset; +} VkVertexInputAttributeDescription2EXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT( + VkCommandBuffer commandBuffer, + uint32_t vertexBindingDescriptionCount, + const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, + uint32_t vertexAttributeDescriptionCount, + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); +#endif + + +// VK_EXT_physical_device_drm is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_physical_device_drm 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1 +#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm" +typedef struct VkPhysicalDeviceDrmPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 hasPrimary; + VkBool32 hasRender; + int64_t primaryMajor; + int64_t primaryMinor; + int64_t renderMajor; + int64_t renderMinor; +} VkPhysicalDeviceDrmPropertiesEXT; + + + +// VK_EXT_device_address_binding_report is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_address_binding_report 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1 +#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report" + +typedef enum VkDeviceAddressBindingTypeEXT { + VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0, + VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1, + VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingTypeEXT; + +typedef enum VkDeviceAddressBindingFlagBitsEXT { + VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001, + VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDeviceAddressBindingFlagBitsEXT; +typedef VkFlags VkDeviceAddressBindingFlagsEXT; +typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 reportAddressBinding; +} VkPhysicalDeviceAddressBindingReportFeaturesEXT; + +typedef struct VkDeviceAddressBindingCallbackDataEXT { + VkStructureType sType; + void* pNext; + VkDeviceAddressBindingFlagsEXT flags; + VkDeviceAddress baseAddress; + VkDeviceSize size; + VkDeviceAddressBindingTypeEXT bindingType; +} VkDeviceAddressBindingCallbackDataEXT; + + + +// VK_EXT_depth_clip_control is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_depth_clip_control 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control" +typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClipControl; +} VkPhysicalDeviceDepthClipControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 negativeOneToOne; +} VkPipelineViewportDepthClipControlCreateInfoEXT; + + + +// VK_EXT_primitive_topology_list_restart is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitive_topology_list_restart 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart" +typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitiveTopologyListRestart; + VkBool32 primitiveTopologyPatchListRestart; +} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT; + + + +// VK_HUAWEI_subpass_shading is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_subpass_shading 1 +#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 3 +#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading" +typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI { + VkStructureType sType; + void* pNext; + VkRenderPass renderPass; + uint32_t subpass; +} VkSubpassShadingPipelineCreateInfoHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 subpassShading; +} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxSubpassShadingWorkgroupSizeAspectRatio; +} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI; + +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize); +typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( + VkDevice device, + VkRenderPass renderpass, + VkExtent2D* pMaxWorkgroupSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI( + VkCommandBuffer commandBuffer); +#endif + + +// VK_HUAWEI_invocation_mask is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_invocation_mask 1 +#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1 +#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask" +typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 invocationMask; +} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI( + VkCommandBuffer commandBuffer, + VkImageView imageView, + VkImageLayout imageLayout); +#endif + + +// VK_NV_external_memory_rdma is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_external_memory_rdma 1 +typedef void* VkRemoteAddressNV; +#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma" +typedef struct VkMemoryGetRemoteAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetRemoteAddressInfoNV; + +typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 externalMemoryRDMA; +} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV( + VkDevice device, + const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, + VkRemoteAddressNV* pAddress); +#endif + + +// VK_EXT_pipeline_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_properties 1 +#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties" +typedef VkPipelineInfoKHR VkPipelineInfoEXT; + +typedef struct VkPipelinePropertiesIdentifierEXT { + VkStructureType sType; + void* pNext; + uint8_t pipelineIdentifier[VK_UUID_SIZE]; +} VkPipelinePropertiesIdentifierEXT; + +typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelinePropertiesIdentifier; +} VkPhysicalDevicePipelinePropertiesFeaturesEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT( + VkDevice device, + const VkPipelineInfoEXT* pPipelineInfo, + VkBaseOutStructure* pPipelineProperties); +#endif + + +// VK_EXT_frame_boundary is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_frame_boundary 1 +#define VK_EXT_FRAME_BOUNDARY_SPEC_VERSION 1 +#define VK_EXT_FRAME_BOUNDARY_EXTENSION_NAME "VK_EXT_frame_boundary" + +typedef enum VkFrameBoundaryFlagBitsEXT { + VK_FRAME_BOUNDARY_FRAME_END_BIT_EXT = 0x00000001, + VK_FRAME_BOUNDARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFrameBoundaryFlagBitsEXT; +typedef VkFlags VkFrameBoundaryFlagsEXT; +typedef struct VkPhysicalDeviceFrameBoundaryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 frameBoundary; +} VkPhysicalDeviceFrameBoundaryFeaturesEXT; + +typedef struct VkFrameBoundaryEXT { + VkStructureType sType; + const void* pNext; + VkFrameBoundaryFlagsEXT flags; + uint64_t frameID; + uint32_t imageCount; + const VkImage* pImages; + uint32_t bufferCount; + const VkBuffer* pBuffers; + uint64_t tagName; + size_t tagSize; + const void* pTag; +} VkFrameBoundaryEXT; + + + +// VK_EXT_multisampled_render_to_single_sampled is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_multisampled_render_to_single_sampled 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1 +#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled" +typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multisampledRenderToSingleSampled; +} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT; + +typedef struct VkSubpassResolvePerformanceQueryEXT { + VkStructureType sType; + void* pNext; + VkBool32 optimal; +} VkSubpassResolvePerformanceQueryEXT; + +typedef struct VkMultisampledRenderToSingleSampledInfoEXT { + VkStructureType sType; + const void* pNext; + VkBool32 multisampledRenderToSingleSampledEnable; + VkSampleCountFlagBits rasterizationSamples; +} VkMultisampledRenderToSingleSampledInfoEXT; + + + +// VK_EXT_extended_dynamic_state2 is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_extended_dynamic_state2 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2" +typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState2; + VkBool32 extendedDynamicState2LogicOp; + VkBool32 extendedDynamicState2PatchControlPoints; +} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp); +typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT( + VkCommandBuffer commandBuffer, + uint32_t patchControlPoints); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 rasterizerDiscardEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthBiasEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT( + VkCommandBuffer commandBuffer, + VkLogicOp logicOp); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 primitiveRestartEnable); +#endif + + +// VK_EXT_color_write_enable is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_color_write_enable 1 +#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1 +#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable" +typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 colorWriteEnable; +} VkPhysicalDeviceColorWriteEnableFeaturesEXT; + +typedef struct VkPipelineColorWriteCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t attachmentCount; + const VkBool32* pColorWriteEnables; +} VkPipelineColorWriteCreateInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t attachmentCount, + const VkBool32* pColorWriteEnables); +#endif + + +// VK_EXT_primitives_generated_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_primitives_generated_query 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1 +#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query" +typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 primitivesGeneratedQuery; + VkBool32 primitivesGeneratedQueryWithRasterizerDiscard; + VkBool32 primitivesGeneratedQueryWithNonZeroStreams; +} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT; + + + +// VK_EXT_global_priority_query is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_global_priority_query 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1 +#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query" +#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT VK_MAX_GLOBAL_PRIORITY_SIZE_KHR +typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT; + +typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT; + + + +// VK_EXT_image_view_min_lod is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_view_min_lod 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1 +#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod" +typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 minLod; +} VkPhysicalDeviceImageViewMinLodFeaturesEXT; + +typedef struct VkImageViewMinLodCreateInfoEXT { + VkStructureType sType; + const void* pNext; + float minLod; +} VkImageViewMinLodCreateInfoEXT; + + + +// VK_EXT_multi_draw is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_multi_draw 1 +#define VK_EXT_MULTI_DRAW_SPEC_VERSION 1 +#define VK_EXT_MULTI_DRAW_EXTENSION_NAME "VK_EXT_multi_draw" +typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 multiDraw; +} VkPhysicalDeviceMultiDrawFeaturesEXT; + +typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxMultiDrawCount; +} VkPhysicalDeviceMultiDrawPropertiesEXT; + +typedef struct VkMultiDrawInfoEXT { + uint32_t firstVertex; + uint32_t vertexCount; +} VkMultiDrawInfoEXT; + +typedef struct VkMultiDrawIndexedInfoEXT { + uint32_t firstIndex; + uint32_t indexCount; + int32_t vertexOffset; +} VkMultiDrawIndexedInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawInfoEXT* pVertexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT( + VkCommandBuffer commandBuffer, + uint32_t drawCount, + const VkMultiDrawIndexedInfoEXT* pIndexInfo, + uint32_t instanceCount, + uint32_t firstInstance, + uint32_t stride, + const int32_t* pVertexOffset); +#endif + + +// VK_EXT_image_2d_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_2d_view_of_3d 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d" +typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 image2DViewOf3D; + VkBool32 sampler2DViewOf3D; +} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT; + + + +// VK_EXT_shader_tile_image is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_tile_image 1 +#define VK_EXT_SHADER_TILE_IMAGE_SPEC_VERSION 1 +#define VK_EXT_SHADER_TILE_IMAGE_EXTENSION_NAME "VK_EXT_shader_tile_image" +typedef struct VkPhysicalDeviceShaderTileImageFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageColorReadAccess; + VkBool32 shaderTileImageDepthReadAccess; + VkBool32 shaderTileImageStencilReadAccess; +} VkPhysicalDeviceShaderTileImageFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderTileImagePropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderTileImageCoherentReadAccelerated; + VkBool32 shaderTileImageReadSampleFromPixelRateInvocation; + VkBool32 shaderTileImageReadFromHelperInvocation; +} VkPhysicalDeviceShaderTileImagePropertiesEXT; + + + +// VK_EXT_opacity_micromap is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_opacity_micromap 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT) +#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2 +#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap" + +typedef enum VkMicromapTypeEXT { + VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0, +#ifdef VK_ENABLE_BETA_EXTENSIONS + VK_MICROMAP_TYPE_DISPLACEMENT_MICROMAP_NV = 1000397000, +#endif + VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapTypeEXT; + +typedef enum VkBuildMicromapModeEXT { + VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0, + VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapModeEXT; + +typedef enum VkCopyMicromapModeEXT { + VK_COPY_MICROMAP_MODE_CLONE_EXT = 0, + VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1, + VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2, + VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3, + VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkCopyMicromapModeEXT; + +typedef enum VkOpacityMicromapFormatEXT { + VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1, + VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2, + VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapFormatEXT; + +typedef enum VkOpacityMicromapSpecialIndexEXT { + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4, + VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF +} VkOpacityMicromapSpecialIndexEXT; + +typedef enum VkAccelerationStructureCompatibilityKHR { + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1, + VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCompatibilityKHR; + +typedef enum VkAccelerationStructureBuildTypeKHR { + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2, + VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureBuildTypeKHR; + +typedef enum VkBuildMicromapFlagBitsEXT { + VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001, + VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002, + VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004, + VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkBuildMicromapFlagBitsEXT; +typedef VkFlags VkBuildMicromapFlagsEXT; + +typedef enum VkMicromapCreateFlagBitsEXT { + VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001, + VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkMicromapCreateFlagBitsEXT; +typedef VkFlags VkMicromapCreateFlagsEXT; +typedef struct VkMicromapUsageEXT { + uint32_t count; + uint32_t subdivisionLevel; + uint32_t format; +} VkMicromapUsageEXT; + +typedef union VkDeviceOrHostAddressKHR { + VkDeviceAddress deviceAddress; + void* hostAddress; +} VkDeviceOrHostAddressKHR; + +typedef struct VkMicromapBuildInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapTypeEXT type; + VkBuildMicromapFlagsEXT flags; + VkBuildMicromapModeEXT mode; + VkMicromapEXT dstMicromap; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkDeviceOrHostAddressConstKHR data; + VkDeviceOrHostAddressKHR scratchData; + VkDeviceOrHostAddressConstKHR triangleArray; + VkDeviceSize triangleArrayStride; +} VkMicromapBuildInfoEXT; + +typedef struct 
VkMicromapCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapCreateFlagsEXT createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkMicromapTypeEXT type; + VkDeviceAddress deviceAddress; +} VkMicromapCreateInfoEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 micromap; + VkBool32 micromapCaptureReplay; + VkBool32 micromapHostCommands; +} VkPhysicalDeviceOpacityMicromapFeaturesEXT; + +typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxOpacity2StateSubdivisionLevel; + uint32_t maxOpacity4StateSubdivisionLevel; +} VkPhysicalDeviceOpacityMicromapPropertiesEXT; + +typedef struct VkMicromapVersionInfoEXT { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkMicromapVersionInfoEXT; + +typedef struct VkCopyMicromapToMemoryInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkDeviceOrHostAddressKHR dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapToMemoryInfoEXT; + +typedef struct VkCopyMemoryToMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMemoryToMicromapInfoEXT; + +typedef struct VkCopyMicromapInfoEXT { + VkStructureType sType; + const void* pNext; + VkMicromapEXT src; + VkMicromapEXT dst; + VkCopyMicromapModeEXT mode; +} VkCopyMicromapInfoEXT; + +typedef struct VkMicromapBuildSizesInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceSize micromapSize; + VkDeviceSize buildScratchSize; + VkBool32 discardable; +} VkMicromapBuildSizesInfoEXT; + +typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT { + VkStructureType sType; + void* pNext; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexBuffer; + VkDeviceSize indexStride; + uint32_t baseTriangle; + uint32_t usageCountsCount; + const VkMicromapUsageEXT* pUsageCounts; + const VkMicromapUsageEXT* const* ppUsageCounts; + VkMicromapEXT micromap; +} VkAccelerationStructureTrianglesOpacityMicromapEXT; + +typedef struct VkMicromapTriangleEXT { + uint32_t dataOffset; + uint16_t subdivisionLevel; + uint16_t format; +} VkMicromapTriangleEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap); +typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, 
VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT( + VkDevice device, + const VkMicromapCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkMicromapEXT* pMicromap); + +VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT( + VkDevice device, + VkMicromapEXT micromap, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkMicromapBuildInfoEXT* pInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT( + VkDevice device, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToMicromapInfoEXT* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT( + VkCommandBuffer commandBuffer, + uint32_t micromapCount, + const VkMicromapEXT* pMicromaps, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT( + VkDevice device, + const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkMicromapBuildInfoEXT* pBuildInfo, + VkMicromapBuildSizesInfoEXT* pSizeInfo); +#endif + + +// 
VK_EXT_load_store_op_none is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_load_store_op_none 1 +#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1 +#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none" + + +// VK_HUAWEI_cluster_culling_shader is a preprocessor guard. Do not pass it to API calls. +#define VK_HUAWEI_cluster_culling_shader 1 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_SPEC_VERSION 3 +#define VK_HUAWEI_CLUSTER_CULLING_SHADER_EXTENSION_NAME "VK_HUAWEI_cluster_culling_shader" +typedef struct VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clustercullingShader; + VkBool32 multiviewClusterCullingShader; +} VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI { + VkStructureType sType; + void* pNext; + uint32_t maxWorkGroupCount[3]; + uint32_t maxWorkGroupSize[3]; + uint32_t maxOutputClusterCount; + VkDeviceSize indirectBufferOffsetAlignment; +} VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI; + +typedef struct VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI { + VkStructureType sType; + void* pNext; + VkBool32 clusterShadingRate; +} VkPhysicalDeviceClusterCullingShaderVrsFeaturesHUAWEI; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterHUAWEI)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawClusterIndirectHUAWEI)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterHUAWEI( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawClusterIndirectHUAWEI( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); +#endif + + +// VK_EXT_border_color_swizzle is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_border_color_swizzle 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1 +#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle" +typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 borderColorSwizzle; + VkBool32 borderColorSwizzleFromImage; +} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT; + +typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkComponentMapping components; + VkBool32 srgb; +} VkSamplerBorderColorComponentMappingCreateInfoEXT; + + + +// VK_EXT_pageable_device_local_memory is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pageable_device_local_memory 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1 +#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory" +typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pageableDeviceLocalMemory; +} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT( + VkDevice device, + VkDeviceMemory memory, + float priority); +#endif + + +// VK_ARM_shader_core_properties is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_shader_core_properties 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_SPEC_VERSION 1 +#define VK_ARM_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_ARM_shader_core_properties" +typedef struct VkPhysicalDeviceShaderCorePropertiesARM { + VkStructureType sType; + void* pNext; + uint32_t pixelRate; + uint32_t texelRate; + uint32_t fmaRate; +} VkPhysicalDeviceShaderCorePropertiesARM; + + + +// VK_ARM_scheduling_controls is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_scheduling_controls 1 +#define VK_ARM_SCHEDULING_CONTROLS_SPEC_VERSION 1 +#define VK_ARM_SCHEDULING_CONTROLS_EXTENSION_NAME "VK_ARM_scheduling_controls" +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagsARM; + +// Flag bits for VkPhysicalDeviceSchedulingControlsFlagBitsARM +typedef VkFlags64 VkPhysicalDeviceSchedulingControlsFlagBitsARM; +static const VkPhysicalDeviceSchedulingControlsFlagBitsARM VK_PHYSICAL_DEVICE_SCHEDULING_CONTROLS_SHADER_CORE_COUNT_ARM = 0x00000001ULL; + +typedef struct VkDeviceQueueShaderCoreControlCreateInfoARM { + VkStructureType sType; + void* pNext; + uint32_t shaderCoreCount; +} VkDeviceQueueShaderCoreControlCreateInfoARM; + +typedef struct VkPhysicalDeviceSchedulingControlsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 schedulingControls; +} VkPhysicalDeviceSchedulingControlsFeaturesARM; + +typedef struct VkPhysicalDeviceSchedulingControlsPropertiesARM { + VkStructureType sType; + void* pNext; + VkPhysicalDeviceSchedulingControlsFlagsARM schedulingControlsFlags; +} VkPhysicalDeviceSchedulingControlsPropertiesARM; + + + +// VK_EXT_image_sliced_view_of_3d is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_sliced_view_of_3d 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_SPEC_VERSION 1 +#define VK_EXT_IMAGE_SLICED_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_sliced_view_of_3d" +#define VK_REMAINING_3D_SLICES_EXT (~0U) +typedef struct VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageSlicedViewOf3D; +} VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT; + +typedef struct VkImageViewSlicedCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t sliceOffset; + uint32_t sliceCount; +} VkImageViewSlicedCreateInfoEXT; + + + +// VK_VALVE_descriptor_set_host_mapping is a preprocessor guard. Do not pass it to API calls. 
+#define VK_VALVE_descriptor_set_host_mapping 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1 +#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping" +typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE { + VkStructureType sType; + void* pNext; + VkBool32 descriptorSetHostMapping; +} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE; + +typedef struct VkDescriptorSetBindingReferenceVALVE { + VkStructureType sType; + const void* pNext; + VkDescriptorSetLayout descriptorSetLayout; + uint32_t binding; +} VkDescriptorSetBindingReferenceVALVE; + +typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE { + VkStructureType sType; + void* pNext; + size_t descriptorOffset; + uint32_t descriptorSize; +} VkDescriptorSetLayoutHostMappingInfoVALVE; + +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); +typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE( + VkDevice device, + const VkDescriptorSetBindingReferenceVALVE* pBindingReference, + VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping); + +VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE( + VkDevice device, + VkDescriptorSet descriptorSet, + void** ppData); +#endif + + +// VK_EXT_depth_clamp_zero_one is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_zero_one 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one" +typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampZeroOne; +} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT; + + + +// VK_EXT_non_seamless_cube_map is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_non_seamless_cube_map 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1 +#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map" +typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nonSeamlessCubeMap; +} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT; + + + +// VK_ARM_render_pass_striped is a preprocessor guard. Do not pass it to API calls. 
+#define VK_ARM_render_pass_striped 1 +#define VK_ARM_RENDER_PASS_STRIPED_SPEC_VERSION 1 +#define VK_ARM_RENDER_PASS_STRIPED_EXTENSION_NAME "VK_ARM_render_pass_striped" +typedef struct VkPhysicalDeviceRenderPassStripedFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 renderPassStriped; +} VkPhysicalDeviceRenderPassStripedFeaturesARM; + +typedef struct VkPhysicalDeviceRenderPassStripedPropertiesARM { + VkStructureType sType; + void* pNext; + VkExtent2D renderPassStripeGranularity; + uint32_t maxRenderPassStripes; +} VkPhysicalDeviceRenderPassStripedPropertiesARM; + +typedef struct VkRenderPassStripeInfoARM { + VkStructureType sType; + const void* pNext; + VkRect2D stripeArea; +} VkRenderPassStripeInfoARM; + +typedef struct VkRenderPassStripeBeginInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeInfoCount; + const VkRenderPassStripeInfoARM* pStripeInfos; +} VkRenderPassStripeBeginInfoARM; + +typedef struct VkRenderPassStripeSubmitInfoARM { + VkStructureType sType; + const void* pNext; + uint32_t stripeSemaphoreInfoCount; + const VkSemaphoreSubmitInfo* pStripeSemaphoreInfos; +} VkRenderPassStripeSubmitInfoARM; + + + +// VK_QCOM_fragment_density_map_offset is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_fragment_density_map_offset 1 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 2 +#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset" +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 fragmentDensityMapOffset; +} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM; + +typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D fragmentDensityOffsetGranularity; +} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM; + +typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t fragmentDensityOffsetCount; + const VkOffset2D* pFragmentDensityOffsets; +} VkSubpassFragmentDensityMapOffsetEndInfoQCOM; + + + +// VK_NV_copy_memory_indirect is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_copy_memory_indirect 1 +#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1 +#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect" +typedef struct VkCopyMemoryIndirectCommandNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize size; +} VkCopyMemoryIndirectCommandNV; + +typedef struct VkCopyMemoryToImageIndirectCommandNV { + VkDeviceAddress srcAddress; + uint32_t bufferRowLength; + uint32_t bufferImageHeight; + VkImageSubresourceLayers imageSubresource; + VkOffset3D imageOffset; + VkExtent3D imageExtent; +} VkCopyMemoryToImageIndirectCommandNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 indirectCopy; +} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV; + +typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV { + VkStructureType sType; + void* pNext; + VkQueueFlags supportedQueues; +} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress copyBufferAddress, + uint32_t copyCount, + uint32_t stride, + VkImage dstImage, + VkImageLayout dstImageLayout, + const VkImageSubresourceLayers* pImageSubresources); +#endif + + +// VK_NV_memory_decompression is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_memory_decompression 1 +#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1 +#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression" + +// Flag bits for VkMemoryDecompressionMethodFlagBitsNV +typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV; +static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL; + +typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV; +typedef struct VkDecompressMemoryRegionNV { + VkDeviceAddress srcAddress; + VkDeviceAddress dstAddress; + VkDeviceSize compressedSize; + VkDeviceSize decompressedSize; + VkMemoryDecompressionMethodFlagsNV decompressionMethod; +} VkDecompressMemoryRegionNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 memoryDecompression; +} VkPhysicalDeviceMemoryDecompressionFeaturesNV; + +typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV { + VkStructureType sType; + void* pNext; + VkMemoryDecompressionMethodFlagsNV decompressionMethods; + uint64_t maxDecompressionIndirectCount; +} VkPhysicalDeviceMemoryDecompressionPropertiesNV; + +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); +typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV( + VkCommandBuffer commandBuffer, + uint32_t decompressRegionCount, + const VkDecompressMemoryRegionNV* pDecompressMemoryRegions); + +VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectCommandsAddress, + VkDeviceAddress indirectCommandsCountAddress, + uint32_t stride); +#endif + + +// VK_NV_device_generated_commands_compute is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_device_generated_commands_compute 1 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_SPEC_VERSION 2 +#define VK_NV_DEVICE_GENERATED_COMMANDS_COMPUTE_EXTENSION_NAME "VK_NV_device_generated_commands_compute" +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCompute; + VkBool32 deviceGeneratedComputePipelines; + VkBool32 deviceGeneratedComputeCaptureReplay; +} VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV; + +typedef struct VkComputePipelineIndirectBufferInfoNV { + VkStructureType sType; + const void* pNext; + VkDeviceAddress deviceAddress; + VkDeviceSize size; + VkDeviceAddress pipelineDeviceAddressCaptureReplay; +} VkComputePipelineIndirectBufferInfoNV; + +typedef struct VkPipelineIndirectDeviceAddressInfoNV { + VkStructureType sType; + const void* pNext; + VkPipelineBindPoint pipelineBindPoint; + VkPipeline pipeline; +} VkPipelineIndirectDeviceAddressInfoNV; + +typedef struct VkBindPipelineIndirectCommandNV { + VkDeviceAddress pipelineAddress; +} VkBindPipelineIndirectCommandNV; + +typedef void (VKAPI_PTR *PFN_vkGetPipelineIndirectMemoryRequirementsNV)(VkDevice device, const VkComputePipelineCreateInfo* pCreateInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdUpdatePipelineIndirectBufferNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetPipelineIndirectDeviceAddressNV)(VkDevice device, const VkPipelineIndirectDeviceAddressInfoNV* pInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetPipelineIndirectMemoryRequirementsNV( + VkDevice device, + const VkComputePipelineCreateInfo* pCreateInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdUpdatePipelineIndirectBufferNV( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetPipelineIndirectDeviceAddressNV( + VkDevice device, + const VkPipelineIndirectDeviceAddressInfoNV* pInfo); +#endif + + +// VK_NV_linear_color_attachment is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_linear_color_attachment 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1 +#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment" +typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 linearColorAttachment; +} VkPhysicalDeviceLinearColorAttachmentFeaturesNV; + + + +// VK_GOOGLE_surfaceless_query is a preprocessor guard. Do not pass it to API calls. +#define VK_GOOGLE_surfaceless_query 1 +#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2 +#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query" + + +// VK_EXT_image_compression_control_swapchain is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_image_compression_control_swapchain 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1 +#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain" +typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 imageCompressionControlSwapchain; +} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT; + + + +// VK_QCOM_image_processing is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_image_processing 1 +#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing" +typedef struct VkImageViewSampleWeightCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkOffset2D filterCenter; + VkExtent2D filterSize; + uint32_t numPhases; +} VkImageViewSampleWeightCreateInfoQCOM; + +typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureSampleWeighted; + VkBool32 textureBoxFilter; + VkBool32 textureBlockMatch; +} VkPhysicalDeviceImageProcessingFeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM { + VkStructureType sType; + void* pNext; + uint32_t maxWeightFilterPhases; + VkExtent2D maxWeightFilterDimension; + VkExtent2D maxBlockMatchRegion; + VkExtent2D maxBoxFilterBlockSize; +} VkPhysicalDeviceImageProcessingPropertiesQCOM; + + + +// VK_EXT_nested_command_buffer is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_nested_command_buffer 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_SPEC_VERSION 1 +#define VK_EXT_NESTED_COMMAND_BUFFER_EXTENSION_NAME "VK_EXT_nested_command_buffer" +typedef struct VkPhysicalDeviceNestedCommandBufferFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nestedCommandBuffer; + VkBool32 nestedCommandBufferRendering; + VkBool32 nestedCommandBufferSimultaneousUse; +} VkPhysicalDeviceNestedCommandBufferFeaturesEXT; + +typedef struct VkPhysicalDeviceNestedCommandBufferPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxCommandBufferNestingLevel; +} VkPhysicalDeviceNestedCommandBufferPropertiesEXT; + + + +// VK_EXT_external_memory_acquire_unmodified is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_external_memory_acquire_unmodified 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_SPEC_VERSION 1 +#define VK_EXT_EXTERNAL_MEMORY_ACQUIRE_UNMODIFIED_EXTENSION_NAME "VK_EXT_external_memory_acquire_unmodified" +typedef struct VkExternalMemoryAcquireUnmodifiedEXT { + VkStructureType sType; + const void* pNext; + VkBool32 acquireUnmodifiedMemory; +} VkExternalMemoryAcquireUnmodifiedEXT; + + + +// VK_EXT_extended_dynamic_state3 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_extended_dynamic_state3 1 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2 +#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3" +typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 extendedDynamicState3TessellationDomainOrigin; + VkBool32 extendedDynamicState3DepthClampEnable; + VkBool32 extendedDynamicState3PolygonMode; + VkBool32 extendedDynamicState3RasterizationSamples; + VkBool32 extendedDynamicState3SampleMask; + VkBool32 extendedDynamicState3AlphaToCoverageEnable; + VkBool32 extendedDynamicState3AlphaToOneEnable; + VkBool32 extendedDynamicState3LogicOpEnable; + VkBool32 extendedDynamicState3ColorBlendEnable; + VkBool32 extendedDynamicState3ColorBlendEquation; + VkBool32 extendedDynamicState3ColorWriteMask; + VkBool32 extendedDynamicState3RasterizationStream; + VkBool32 extendedDynamicState3ConservativeRasterizationMode; + VkBool32 extendedDynamicState3ExtraPrimitiveOverestimationSize; + VkBool32 extendedDynamicState3DepthClipEnable; + VkBool32 extendedDynamicState3SampleLocationsEnable; + VkBool32 extendedDynamicState3ColorBlendAdvanced; + VkBool32 extendedDynamicState3ProvokingVertexMode; + VkBool32 extendedDynamicState3LineRasterizationMode; + VkBool32 extendedDynamicState3LineStippleEnable; + VkBool32 extendedDynamicState3DepthClipNegativeOneToOne; + VkBool32 extendedDynamicState3ViewportWScalingEnable; + VkBool32 extendedDynamicState3ViewportSwizzle; + VkBool32 extendedDynamicState3CoverageToColorEnable; + VkBool32 extendedDynamicState3CoverageToColorLocation; + VkBool32 extendedDynamicState3CoverageModulationMode; + VkBool32 extendedDynamicState3CoverageModulationTableEnable; + VkBool32 extendedDynamicState3CoverageModulationTable; + VkBool32 extendedDynamicState3CoverageReductionMode; + VkBool32 extendedDynamicState3RepresentativeFragmentTestEnable; + VkBool32 extendedDynamicState3ShadingRateImageEnable; +} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT; + +typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicPrimitiveTopologyUnrestricted; +} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT; + +typedef struct VkColorBlendEquationEXT { + VkBlendFactor srcColorBlendFactor; + VkBlendFactor dstColorBlendFactor; + VkBlendOp colorBlendOp; + VkBlendFactor srcAlphaBlendFactor; + VkBlendFactor dstAlphaBlendFactor; + VkBlendOp alphaBlendOp; +} VkColorBlendEquationEXT; + +typedef struct VkColorBlendAdvancedEXT { + VkBlendOp advancedBlendOp; + VkBool32 srcPremultiplied; + VkBool32 dstPremultiplied; + VkBlendOverlapEXT blendOverlap; + VkBool32 clampResults; +} VkColorBlendAdvancedEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks); +typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin); +typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream); +typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced); +typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable); +typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable); +typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable); +typedef void (VKAPI_PTR 
*PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClampEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT( + VkCommandBuffer commandBuffer, + VkPolygonMode polygonMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits rasterizationSamples); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT( + VkCommandBuffer commandBuffer, + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToCoverageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 alphaToOneEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 logicOpEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkBool32* pColorBlendEnables); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendEquationEXT* pColorBlendEquations); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags* pColorWriteMasks); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT( + VkCommandBuffer commandBuffer, + VkTessellationDomainOrigin domainOrigin); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT( + VkCommandBuffer commandBuffer, + uint32_t rasterizationStream); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkConservativeRasterizationModeEXT conservativeRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT( + VkCommandBuffer commandBuffer, + float extraPrimitiveOverestimationSize); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 depthClipEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 sampleLocationsEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT( + VkCommandBuffer commandBuffer, + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorBlendAdvancedEXT* pColorBlendAdvanced); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT( + VkCommandBuffer commandBuffer, + VkProvokingVertexModeEXT provokingVertexMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT( + VkCommandBuffer commandBuffer, + VkLineRasterizationModeEXT lineRasterizationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT( + VkCommandBuffer commandBuffer, + VkBool32 stippledLineEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT( + VkCommandBuffer commandBuffer, + VkBool32 negativeOneToOne); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 viewportWScalingEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV( + VkCommandBuffer commandBuffer, + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* 
pViewportSwizzles); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageToColorEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV( + VkCommandBuffer commandBuffer, + uint32_t coverageToColorLocation); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV( + VkCommandBuffer commandBuffer, + VkCoverageModulationModeNV coverageModulationMode); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 coverageModulationTableEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV( + VkCommandBuffer commandBuffer, + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 shadingRateImageEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV( + VkCommandBuffer commandBuffer, + VkBool32 representativeFragmentTestEnable); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV( + VkCommandBuffer commandBuffer, + VkCoverageReductionModeNV coverageReductionMode); +#endif + + +// VK_EXT_subpass_merge_feedback is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_subpass_merge_feedback 1 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2 +#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback" + +typedef enum VkSubpassMergeStatusEXT { + VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0, + VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12, + VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13, + VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkSubpassMergeStatusEXT; +typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 subpassMergeFeedback; +} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT; + +typedef struct VkRenderPassCreationControlEXT { + VkStructureType sType; + const void* pNext; + VkBool32 disallowMerging; +} VkRenderPassCreationControlEXT; + +typedef struct VkRenderPassCreationFeedbackInfoEXT { + uint32_t postMergeSubpassCount; +} VkRenderPassCreationFeedbackInfoEXT; + +typedef struct VkRenderPassCreationFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassCreationFeedbackInfoEXT* pRenderPassFeedback; +} VkRenderPassCreationFeedbackCreateInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackInfoEXT { + VkSubpassMergeStatusEXT subpassMergeStatus; + char description[VK_MAX_DESCRIPTION_SIZE]; + uint32_t postMergeIndex; +} VkRenderPassSubpassFeedbackInfoEXT; + +typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkRenderPassSubpassFeedbackInfoEXT* pSubpassFeedback; +} 
VkRenderPassSubpassFeedbackCreateInfoEXT; + + + +// VK_LUNARG_direct_driver_loading is a preprocessor guard. Do not pass it to API calls. +#define VK_LUNARG_direct_driver_loading 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1 +#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading" + +typedef enum VkDirectDriverLoadingModeLUNARG { + VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0, + VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1, + VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF +} VkDirectDriverLoadingModeLUNARG; +typedef VkFlags VkDirectDriverLoadingFlagsLUNARG; +typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)( + VkInstance instance, const char* pName); + +typedef struct VkDirectDriverLoadingInfoLUNARG { + VkStructureType sType; + void* pNext; + VkDirectDriverLoadingFlagsLUNARG flags; + PFN_vkGetInstanceProcAddrLUNARG pfnGetInstanceProcAddr; +} VkDirectDriverLoadingInfoLUNARG; + +typedef struct VkDirectDriverLoadingListLUNARG { + VkStructureType sType; + const void* pNext; + VkDirectDriverLoadingModeLUNARG mode; + uint32_t driverCount; + const VkDirectDriverLoadingInfoLUNARG* pDrivers; +} VkDirectDriverLoadingListLUNARG; + + + +// VK_EXT_shader_module_identifier is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_module_identifier 1 +#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U +#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1 +#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier" +typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderModuleIdentifier; +} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE]; +} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT; + +typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t identifierSize; + const uint8_t* pIdentifier; +} VkPipelineShaderStageModuleIdentifierCreateInfoEXT; + +typedef struct VkShaderModuleIdentifierEXT { + VkStructureType sType; + void* pNext; + uint32_t identifierSize; + uint8_t identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT]; +} VkShaderModuleIdentifierEXT; + +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier); +typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT( + VkDevice device, + VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier); + +VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + VkShaderModuleIdentifierEXT* pIdentifier); +#endif + + +// VK_EXT_rasterization_order_attachment_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_rasterization_order_attachment_access 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1 +#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access" + + +// VK_NV_optical_flow is a preprocessor guard. 
Do not pass it to API calls. +#define VK_NV_optical_flow 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV) +#define VK_NV_OPTICAL_FLOW_SPEC_VERSION 1 +#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow" + +typedef enum VkOpticalFlowPerformanceLevelNV { + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3, + VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowPerformanceLevelNV; + +typedef enum VkOpticalFlowSessionBindingPointNV { + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8, + VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionBindingPointNV; + +typedef enum VkOpticalFlowGridSizeFlagBitsNV { + VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowGridSizeFlagBitsNV; +typedef VkFlags VkOpticalFlowGridSizeFlagsNV; + +typedef enum VkOpticalFlowUsageFlagBitsNV { + VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0, + VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowUsageFlagBitsNV; +typedef VkFlags VkOpticalFlowUsageFlagsNV; + +typedef enum VkOpticalFlowSessionCreateFlagBitsNV { + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002, + VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004, + VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008, + VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010, + VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowSessionCreateFlagBitsNV; +typedef VkFlags VkOpticalFlowSessionCreateFlagsNV; + +typedef enum VkOpticalFlowExecuteFlagBitsNV { + VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001, + VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF +} VkOpticalFlowExecuteFlagBitsNV; +typedef VkFlags VkOpticalFlowExecuteFlagsNV; +typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 opticalFlow; +} VkPhysicalDeviceOpticalFlowFeaturesNV; + +typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowGridSizeFlagsNV supportedOutputGridSizes; + VkOpticalFlowGridSizeFlagsNV supportedHintGridSizes; + VkBool32 hintSupported; + VkBool32 costSupported; + VkBool32 bidirectionalFlowSupported; + VkBool32 globalFlowSupported; + uint32_t minWidth; + uint32_t minHeight; + 
uint32_t maxWidth; + uint32_t maxHeight; + uint32_t maxNumRegionsOfInterest; +} VkPhysicalDeviceOpticalFlowPropertiesNV; + +typedef struct VkOpticalFlowImageFormatInfoNV { + VkStructureType sType; + const void* pNext; + VkOpticalFlowUsageFlagsNV usage; +} VkOpticalFlowImageFormatInfoNV; + +typedef struct VkOpticalFlowImageFormatPropertiesNV { + VkStructureType sType; + const void* pNext; + VkFormat format; +} VkOpticalFlowImageFormatPropertiesNV; + +typedef struct VkOpticalFlowSessionCreateInfoNV { + VkStructureType sType; + void* pNext; + uint32_t width; + uint32_t height; + VkFormat imageFormat; + VkFormat flowVectorFormat; + VkFormat costFormat; + VkOpticalFlowGridSizeFlagsNV outputGridSize; + VkOpticalFlowGridSizeFlagsNV hintGridSize; + VkOpticalFlowPerformanceLevelNV performanceLevel; + VkOpticalFlowSessionCreateFlagsNV flags; +} VkOpticalFlowSessionCreateInfoNV; + +typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV { + VkStructureType sType; + void* pNext; + uint32_t id; + uint32_t size; + const void* pPrivateData; +} VkOpticalFlowSessionCreatePrivateDataInfoNV; + +typedef struct VkOpticalFlowExecuteInfoNV { + VkStructureType sType; + void* pNext; + VkOpticalFlowExecuteFlagsNV flags; + uint32_t regionCount; + const VkRect2D* pRegions; +} VkOpticalFlowExecuteInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); +typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession); +typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout); +typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV( + VkPhysicalDevice physicalDevice, + const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, + uint32_t* pFormatCount, + VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV( + VkDevice device, + const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkOpticalFlowSessionNV* pSession); + +VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV( + VkDevice device, + VkOpticalFlowSessionNV session, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV( + VkDevice device, + VkOpticalFlowSessionNV session, + VkOpticalFlowSessionBindingPointNV bindingPoint, + VkImageView view, + VkImageLayout layout); + +VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV( + VkCommandBuffer commandBuffer, + VkOpticalFlowSessionNV session, + const VkOpticalFlowExecuteInfoNV* pExecuteInfo); +#endif + + +// VK_EXT_legacy_dithering is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_legacy_dithering 1 +#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 2 +#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering" +typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyDithering; +} VkPhysicalDeviceLegacyDitheringFeaturesEXT; + + + +// VK_EXT_pipeline_protected_access is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_protected_access 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access" +typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineProtectedAccess; +} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT; + + + +// VK_AMD_anti_lag is a preprocessor guard. Do not pass it to API calls. +#define VK_AMD_anti_lag 1 +#define VK_AMD_ANTI_LAG_SPEC_VERSION 1 +#define VK_AMD_ANTI_LAG_EXTENSION_NAME "VK_AMD_anti_lag" + +typedef enum VkAntiLagModeAMD { + VK_ANTI_LAG_MODE_DRIVER_CONTROL_AMD = 0, + VK_ANTI_LAG_MODE_ON_AMD = 1, + VK_ANTI_LAG_MODE_OFF_AMD = 2, + VK_ANTI_LAG_MODE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagModeAMD; + +typedef enum VkAntiLagStageAMD { + VK_ANTI_LAG_STAGE_INPUT_AMD = 0, + VK_ANTI_LAG_STAGE_PRESENT_AMD = 1, + VK_ANTI_LAG_STAGE_MAX_ENUM_AMD = 0x7FFFFFFF +} VkAntiLagStageAMD; +typedef struct VkPhysicalDeviceAntiLagFeaturesAMD { + VkStructureType sType; + void* pNext; + VkBool32 antiLag; +} VkPhysicalDeviceAntiLagFeaturesAMD; + +typedef struct VkAntiLagPresentationInfoAMD { + VkStructureType sType; + void* pNext; + VkAntiLagStageAMD stage; + uint64_t frameIndex; +} VkAntiLagPresentationInfoAMD; + +typedef struct VkAntiLagDataAMD { + VkStructureType sType; + const void* pNext; + VkAntiLagModeAMD mode; + uint32_t maxFPS; + const VkAntiLagPresentationInfoAMD* pPresentationInfo; +} VkAntiLagDataAMD; + +typedef void (VKAPI_PTR *PFN_vkAntiLagUpdateAMD)(VkDevice device, const VkAntiLagDataAMD* pData); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkAntiLagUpdateAMD( + VkDevice device, + const VkAntiLagDataAMD* pData); +#endif + + +// VK_EXT_shader_object is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_shader_object 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderEXT) +#define VK_EXT_SHADER_OBJECT_SPEC_VERSION 1 +#define VK_EXT_SHADER_OBJECT_EXTENSION_NAME "VK_EXT_shader_object" + +typedef enum VkShaderCodeTypeEXT { + VK_SHADER_CODE_TYPE_BINARY_EXT = 0, + VK_SHADER_CODE_TYPE_SPIRV_EXT = 1, + VK_SHADER_CODE_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCodeTypeEXT; + +typedef enum VkDepthClampModeEXT { + VK_DEPTH_CLAMP_MODE_VIEWPORT_RANGE_EXT = 0, + VK_DEPTH_CLAMP_MODE_USER_DEFINED_RANGE_EXT = 1, + VK_DEPTH_CLAMP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkDepthClampModeEXT; + +typedef enum VkShaderCreateFlagBitsEXT { + VK_SHADER_CREATE_LINK_STAGE_BIT_EXT = 0x00000001, + VK_SHADER_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = 0x00000002, + VK_SHADER_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = 0x00000004, + VK_SHADER_CREATE_NO_TASK_SHADER_BIT_EXT = 0x00000008, + VK_SHADER_CREATE_DISPATCH_BASE_BIT_EXT = 0x00000010, + VK_SHADER_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_EXT = 0x00000020, + VK_SHADER_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00000040, + VK_SHADER_CREATE_INDIRECT_BINDABLE_BIT_EXT = 0x00000080, + VK_SHADER_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkShaderCreateFlagBitsEXT; +typedef VkFlags VkShaderCreateFlagsEXT; +typedef struct VkPhysicalDeviceShaderObjectFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderObject; +} VkPhysicalDeviceShaderObjectFeaturesEXT; + +typedef struct VkPhysicalDeviceShaderObjectPropertiesEXT { + VkStructureType sType; + void* pNext; + uint8_t shaderBinaryUUID[VK_UUID_SIZE]; + uint32_t shaderBinaryVersion; +} VkPhysicalDeviceShaderObjectPropertiesEXT; + +typedef struct VkShaderCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderCreateFlagsEXT flags; + VkShaderStageFlagBits stage; + VkShaderStageFlags nextStage; + VkShaderCodeTypeEXT codeType; + size_t codeSize; + const void* pCode; + const char* pName; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; + const VkSpecializationInfo* pSpecializationInfo; +} VkShaderCreateInfoEXT; + +typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkShaderRequiredSubgroupSizeCreateInfoEXT; + +typedef struct VkDepthClampRangeEXT { + float minDepthClamp; + float maxDepthClamp; +} VkDepthClampRangeEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateShadersEXT)(VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkDestroyShaderEXT)(VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetShaderBinaryDataEXT)(VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdBindShadersEXT)(VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders); +typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampRangeEXT)(VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateShadersEXT( + VkDevice device, + uint32_t createInfoCount, + const VkShaderCreateInfoEXT* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkDestroyShaderEXT( + VkDevice device, + VkShaderEXT shader, + const VkAllocationCallbacks* 
pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderBinaryDataEXT( + VkDevice device, + VkShaderEXT shader, + size_t* pDataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdBindShadersEXT( + VkCommandBuffer commandBuffer, + uint32_t stageCount, + const VkShaderStageFlagBits* pStages, + const VkShaderEXT* pShaders); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampRangeEXT( + VkCommandBuffer commandBuffer, + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange); +#endif + + +// VK_QCOM_tile_properties is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_tile_properties 1 +#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1 +#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties" +typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 tileProperties; +} VkPhysicalDeviceTilePropertiesFeaturesQCOM; + +typedef struct VkTilePropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent3D tileSize; + VkExtent2D apronSize; + VkOffset2D origin; +} VkTilePropertiesQCOM; + +typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties); +typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM( + VkDevice device, + VkFramebuffer framebuffer, + uint32_t* pPropertiesCount, + VkTilePropertiesQCOM* pProperties); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM( + VkDevice device, + const VkRenderingInfo* pRenderingInfo, + VkTilePropertiesQCOM* pProperties); +#endif + + +// VK_SEC_amigo_profiling is a preprocessor guard. Do not pass it to API calls. +#define VK_SEC_amigo_profiling 1 +#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1 +#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling" +typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC { + VkStructureType sType; + void* pNext; + VkBool32 amigoProfiling; +} VkPhysicalDeviceAmigoProfilingFeaturesSEC; + +typedef struct VkAmigoProfilingSubmitInfoSEC { + VkStructureType sType; + const void* pNext; + uint64_t firstDrawTimestamp; + uint64_t swapBufferTimestamp; +} VkAmigoProfilingSubmitInfoSEC; + + + +// VK_QCOM_multiview_per_view_viewports is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_viewports 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports" +typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewViewports; +} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM; + + + +// VK_NV_ray_tracing_invocation_reorder is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_invocation_reorder 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder" + +typedef enum VkRayTracingInvocationReorderModeNV { + VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1, + VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF +} VkRayTracingInvocationReorderModeNV; +typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV { + VkStructureType sType; + void* pNext; + VkRayTracingInvocationReorderModeNV rayTracingInvocationReorderReorderingHint; +} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV; + +typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingInvocationReorder; +} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV; + + + +// VK_NV_extended_sparse_address_space is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_extended_sparse_address_space 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_SPEC_VERSION 1 +#define VK_NV_EXTENDED_SPARSE_ADDRESS_SPACE_EXTENSION_NAME "VK_NV_extended_sparse_address_space" +typedef struct VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 extendedSparseAddressSpace; +} VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV; + +typedef struct VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV { + VkStructureType sType; + void* pNext; + VkDeviceSize extendedSparseAddressSpaceSize; + VkImageUsageFlags extendedSparseImageUsageFlags; + VkBufferUsageFlags extendedSparseBufferUsageFlags; +} VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV; + + + +// VK_EXT_mutable_descriptor_type is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_mutable_descriptor_type 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1 +#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type" + + +// VK_EXT_legacy_vertex_attributes is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_legacy_vertex_attributes 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_SPEC_VERSION 1 +#define VK_EXT_LEGACY_VERTEX_ATTRIBUTES_EXTENSION_NAME "VK_EXT_legacy_vertex_attributes" +typedef struct VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 legacyVertexAttributes; +} VkPhysicalDeviceLegacyVertexAttributesFeaturesEXT; + +typedef struct VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT { + VkStructureType sType; + void* pNext; + VkBool32 nativeUnalignedPerformance; +} VkPhysicalDeviceLegacyVertexAttributesPropertiesEXT; + + + +// VK_EXT_layer_settings is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_layer_settings 1 +#define VK_EXT_LAYER_SETTINGS_SPEC_VERSION 2 +#define VK_EXT_LAYER_SETTINGS_EXTENSION_NAME "VK_EXT_layer_settings" + +typedef enum VkLayerSettingTypeEXT { + VK_LAYER_SETTING_TYPE_BOOL32_EXT = 0, + VK_LAYER_SETTING_TYPE_INT32_EXT = 1, + VK_LAYER_SETTING_TYPE_INT64_EXT = 2, + VK_LAYER_SETTING_TYPE_UINT32_EXT = 3, + VK_LAYER_SETTING_TYPE_UINT64_EXT = 4, + VK_LAYER_SETTING_TYPE_FLOAT32_EXT = 5, + VK_LAYER_SETTING_TYPE_FLOAT64_EXT = 6, + VK_LAYER_SETTING_TYPE_STRING_EXT = 7, + VK_LAYER_SETTING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkLayerSettingTypeEXT; +typedef struct VkLayerSettingEXT { + const char* pLayerName; + const char* pSettingName; + VkLayerSettingTypeEXT type; + uint32_t valueCount; + const void* pValues; +} VkLayerSettingEXT; + +typedef struct VkLayerSettingsCreateInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t settingCount; + const VkLayerSettingEXT* pSettings; +} VkLayerSettingsCreateInfoEXT; + + + +// VK_ARM_shader_core_builtins is a preprocessor guard. Do not pass it to API calls. +#define VK_ARM_shader_core_builtins 1 +#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2 +#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins" +typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM { + VkStructureType sType; + void* pNext; + VkBool32 shaderCoreBuiltins; +} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM; + +typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM { + VkStructureType sType; + void* pNext; + uint64_t shaderCoreMask; + uint32_t shaderCoreCount; + uint32_t shaderWarpsPerCore; +} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM; + + + +// VK_EXT_pipeline_library_group_handles is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_pipeline_library_group_handles 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_SPEC_VERSION 1 +#define VK_EXT_PIPELINE_LIBRARY_GROUP_HANDLES_EXTENSION_NAME "VK_EXT_pipeline_library_group_handles" +typedef struct VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 pipelineLibraryGroupHandles; +} VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT; + + + +// VK_EXT_dynamic_rendering_unused_attachments is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_dynamic_rendering_unused_attachments 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_SPEC_VERSION 1 +#define VK_EXT_DYNAMIC_RENDERING_UNUSED_ATTACHMENTS_EXTENSION_NAME "VK_EXT_dynamic_rendering_unused_attachments" +typedef struct VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 dynamicRenderingUnusedAttachments; +} VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT; + + + +// VK_NV_low_latency2 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_low_latency2 1 +#define VK_NV_LOW_LATENCY_2_SPEC_VERSION 2 +#define VK_NV_LOW_LATENCY_2_EXTENSION_NAME "VK_NV_low_latency2" + +typedef enum VkLatencyMarkerNV { + VK_LATENCY_MARKER_SIMULATION_START_NV = 0, + VK_LATENCY_MARKER_SIMULATION_END_NV = 1, + VK_LATENCY_MARKER_RENDERSUBMIT_START_NV = 2, + VK_LATENCY_MARKER_RENDERSUBMIT_END_NV = 3, + VK_LATENCY_MARKER_PRESENT_START_NV = 4, + VK_LATENCY_MARKER_PRESENT_END_NV = 5, + VK_LATENCY_MARKER_INPUT_SAMPLE_NV = 6, + VK_LATENCY_MARKER_TRIGGER_FLASH_NV = 7, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_START_NV = 8, + VK_LATENCY_MARKER_OUT_OF_BAND_RENDERSUBMIT_END_NV = 9, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_START_NV = 10, + VK_LATENCY_MARKER_OUT_OF_BAND_PRESENT_END_NV = 11, + VK_LATENCY_MARKER_MAX_ENUM_NV = 0x7FFFFFFF +} VkLatencyMarkerNV; + +typedef enum VkOutOfBandQueueTypeNV { + VK_OUT_OF_BAND_QUEUE_TYPE_RENDER_NV = 0, + VK_OUT_OF_BAND_QUEUE_TYPE_PRESENT_NV = 1, + VK_OUT_OF_BAND_QUEUE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF +} VkOutOfBandQueueTypeNV; +typedef struct VkLatencySleepModeInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 lowLatencyMode; + VkBool32 lowLatencyBoost; + uint32_t minimumIntervalUs; +} VkLatencySleepModeInfoNV; + +typedef struct VkLatencySleepInfoNV { + VkStructureType sType; + const void* pNext; + VkSemaphore signalSemaphore; + uint64_t value; +} VkLatencySleepInfoNV; + +typedef struct VkSetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + VkLatencyMarkerNV marker; +} VkSetLatencyMarkerInfoNV; + +typedef struct VkLatencyTimingsFrameReportNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; + uint64_t inputSampleTimeUs; + uint64_t simStartTimeUs; + uint64_t simEndTimeUs; + uint64_t renderSubmitStartTimeUs; + uint64_t renderSubmitEndTimeUs; + uint64_t presentStartTimeUs; + uint64_t presentEndTimeUs; + uint64_t driverStartTimeUs; + uint64_t driverEndTimeUs; + uint64_t osRenderQueueStartTimeUs; + uint64_t osRenderQueueEndTimeUs; + uint64_t gpuRenderStartTimeUs; + uint64_t gpuRenderEndTimeUs; +} VkLatencyTimingsFrameReportNV; + +typedef struct VkGetLatencyMarkerInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t timingCount; + VkLatencyTimingsFrameReportNV* pTimings; +} VkGetLatencyMarkerInfoNV; + +typedef struct VkLatencySubmissionPresentIdNV { + VkStructureType sType; + const void* pNext; + uint64_t presentID; +} VkLatencySubmissionPresentIdNV; + +typedef struct VkSwapchainLatencyCreateInfoNV { + VkStructureType sType; + const void* pNext; + VkBool32 latencyModeEnable; +} VkSwapchainLatencyCreateInfoNV; + +typedef struct VkOutOfBandQueueTypeInfoNV { + VkStructureType sType; + const void* pNext; + VkOutOfBandQueueTypeNV queueType; +} VkOutOfBandQueueTypeInfoNV; + +typedef struct VkLatencySurfaceCapabilitiesNV { + VkStructureType sType; + const void* pNext; + uint32_t presentModeCount; + VkPresentModeKHR* pPresentModes; +} VkLatencySurfaceCapabilitiesNV; + +typedef VkResult (VKAPI_PTR *PFN_vkSetLatencySleepModeNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepModeInfoNV* pSleepModeInfo); +typedef VkResult (VKAPI_PTR *PFN_vkLatencySleepNV)(VkDevice device, VkSwapchainKHR swapchain, const VkLatencySleepInfoNV* pSleepInfo); +typedef void (VKAPI_PTR *PFN_vkSetLatencyMarkerNV)(VkDevice device, VkSwapchainKHR swapchain, const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); +typedef void (VKAPI_PTR *PFN_vkGetLatencyTimingsNV)(VkDevice device, VkSwapchainKHR swapchain, VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); 
+typedef void (VKAPI_PTR *PFN_vkQueueNotifyOutOfBandNV)(VkQueue queue, const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkSetLatencySleepModeNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepModeInfoNV* pSleepModeInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkLatencySleepNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkLatencySleepInfoNV* pSleepInfo); + +VKAPI_ATTR void VKAPI_CALL vkSetLatencyMarkerNV( + VkDevice device, + VkSwapchainKHR swapchain, + const VkSetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkGetLatencyTimingsNV( + VkDevice device, + VkSwapchainKHR swapchain, + VkGetLatencyMarkerInfoNV* pLatencyMarkerInfo); + +VKAPI_ATTR void VKAPI_CALL vkQueueNotifyOutOfBandNV( + VkQueue queue, + const VkOutOfBandQueueTypeInfoNV* pQueueTypeInfo); +#endif + + +// VK_QCOM_multiview_per_view_render_areas is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_multiview_per_view_render_areas 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_SPEC_VERSION 1 +#define VK_QCOM_MULTIVIEW_PER_VIEW_RENDER_AREAS_EXTENSION_NAME "VK_QCOM_multiview_per_view_render_areas" +typedef struct VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 multiviewPerViewRenderAreas; +} VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM; + +typedef struct VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM { + VkStructureType sType; + const void* pNext; + uint32_t perViewRenderAreaCount; + const VkRect2D* pPerViewRenderAreas; +} VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM; + + + +// VK_NV_per_stage_descriptor_set is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_per_stage_descriptor_set 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_SPEC_VERSION 1 +#define VK_NV_PER_STAGE_DESCRIPTOR_SET_EXTENSION_NAME "VK_NV_per_stage_descriptor_set" +typedef struct VkPhysicalDevicePerStageDescriptorSetFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 perStageDescriptorSet; + VkBool32 dynamicPipelineLayout; +} VkPhysicalDevicePerStageDescriptorSetFeaturesNV; + + + +// VK_QCOM_image_processing2 is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_image_processing2 1 +#define VK_QCOM_IMAGE_PROCESSING_2_SPEC_VERSION 1 +#define VK_QCOM_IMAGE_PROCESSING_2_EXTENSION_NAME "VK_QCOM_image_processing2" + +typedef enum VkBlockMatchWindowCompareModeQCOM { + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MIN_QCOM = 0, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_QCOM = 1, + VK_BLOCK_MATCH_WINDOW_COMPARE_MODE_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkBlockMatchWindowCompareModeQCOM; +typedef struct VkPhysicalDeviceImageProcessing2FeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 textureBlockMatch2; +} VkPhysicalDeviceImageProcessing2FeaturesQCOM; + +typedef struct VkPhysicalDeviceImageProcessing2PropertiesQCOM { + VkStructureType sType; + void* pNext; + VkExtent2D maxBlockMatchWindow; +} VkPhysicalDeviceImageProcessing2PropertiesQCOM; + +typedef struct VkSamplerBlockMatchWindowCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkExtent2D windowExtent; + VkBlockMatchWindowCompareModeQCOM windowCompareMode; +} VkSamplerBlockMatchWindowCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_weights is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QCOM_filter_cubic_weights 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_WEIGHTS_EXTENSION_NAME "VK_QCOM_filter_cubic_weights" + +typedef enum VkCubicFilterWeightsQCOM { + VK_CUBIC_FILTER_WEIGHTS_CATMULL_ROM_QCOM = 0, + VK_CUBIC_FILTER_WEIGHTS_ZERO_TANGENT_CARDINAL_QCOM = 1, + VK_CUBIC_FILTER_WEIGHTS_B_SPLINE_QCOM = 2, + VK_CUBIC_FILTER_WEIGHTS_MITCHELL_NETRAVALI_QCOM = 3, + VK_CUBIC_FILTER_WEIGHTS_MAX_ENUM_QCOM = 0x7FFFFFFF +} VkCubicFilterWeightsQCOM; +typedef struct VkPhysicalDeviceCubicWeightsFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 selectableCubicWeights; +} VkPhysicalDeviceCubicWeightsFeaturesQCOM; + +typedef struct VkSamplerCubicWeightsCreateInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkSamplerCubicWeightsCreateInfoQCOM; + +typedef struct VkBlitImageCubicWeightsInfoQCOM { + VkStructureType sType; + const void* pNext; + VkCubicFilterWeightsQCOM cubicWeights; +} VkBlitImageCubicWeightsInfoQCOM; + + + +// VK_QCOM_ycbcr_degamma is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_ycbcr_degamma 1 +#define VK_QCOM_YCBCR_DEGAMMA_SPEC_VERSION 1 +#define VK_QCOM_YCBCR_DEGAMMA_EXTENSION_NAME "VK_QCOM_ycbcr_degamma" +typedef struct VkPhysicalDeviceYcbcrDegammaFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 ycbcrDegamma; +} VkPhysicalDeviceYcbcrDegammaFeaturesQCOM; + +typedef struct VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM { + VkStructureType sType; + void* pNext; + VkBool32 enableYDegamma; + VkBool32 enableCbCrDegamma; +} VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM; + + + +// VK_QCOM_filter_cubic_clamp is a preprocessor guard. Do not pass it to API calls. +#define VK_QCOM_filter_cubic_clamp 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_SPEC_VERSION 1 +#define VK_QCOM_FILTER_CUBIC_CLAMP_EXTENSION_NAME "VK_QCOM_filter_cubic_clamp" +typedef struct VkPhysicalDeviceCubicClampFeaturesQCOM { + VkStructureType sType; + void* pNext; + VkBool32 cubicRangeClamp; +} VkPhysicalDeviceCubicClampFeaturesQCOM; + + + +// VK_EXT_attachment_feedback_loop_dynamic_state is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_attachment_feedback_loop_dynamic_state 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_SPEC_VERSION 1 +#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_dynamic_state" +typedef struct VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 attachmentFeedbackLoopDynamicState; +} VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT)(VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdSetAttachmentFeedbackLoopEnableEXT( + VkCommandBuffer commandBuffer, + VkImageAspectFlags aspectMask); +#endif + + +// VK_MSFT_layered_driver is a preprocessor guard. Do not pass it to API calls. 
+#define VK_MSFT_layered_driver 1 +#define VK_MSFT_LAYERED_DRIVER_SPEC_VERSION 1 +#define VK_MSFT_LAYERED_DRIVER_EXTENSION_NAME "VK_MSFT_layered_driver" + +typedef enum VkLayeredDriverUnderlyingApiMSFT { + VK_LAYERED_DRIVER_UNDERLYING_API_NONE_MSFT = 0, + VK_LAYERED_DRIVER_UNDERLYING_API_D3D12_MSFT = 1, + VK_LAYERED_DRIVER_UNDERLYING_API_MAX_ENUM_MSFT = 0x7FFFFFFF +} VkLayeredDriverUnderlyingApiMSFT; +typedef struct VkPhysicalDeviceLayeredDriverPropertiesMSFT { + VkStructureType sType; + void* pNext; + VkLayeredDriverUnderlyingApiMSFT underlyingAPI; +} VkPhysicalDeviceLayeredDriverPropertiesMSFT; + + + +// VK_NV_descriptor_pool_overallocation is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_descriptor_pool_overallocation 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_SPEC_VERSION 1 +#define VK_NV_DESCRIPTOR_POOL_OVERALLOCATION_EXTENSION_NAME "VK_NV_descriptor_pool_overallocation" +typedef struct VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 descriptorPoolOverallocation; +} VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV; + + + +// VK_NV_raw_access_chains is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_raw_access_chains 1 +#define VK_NV_RAW_ACCESS_CHAINS_SPEC_VERSION 1 +#define VK_NV_RAW_ACCESS_CHAINS_EXTENSION_NAME "VK_NV_raw_access_chains" +typedef struct VkPhysicalDeviceRawAccessChainsFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderRawAccessChains; +} VkPhysicalDeviceRawAccessChainsFeaturesNV; + + + +// VK_NV_command_buffer_inheritance is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_command_buffer_inheritance 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_SPEC_VERSION 1 +#define VK_NV_COMMAND_BUFFER_INHERITANCE_EXTENSION_NAME "VK_NV_command_buffer_inheritance" +typedef struct VkPhysicalDeviceCommandBufferInheritanceFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 commandBufferInheritance; +} VkPhysicalDeviceCommandBufferInheritanceFeaturesNV; + + + +// VK_NV_shader_atomic_float16_vector is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_shader_atomic_float16_vector 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_SPEC_VERSION 1 +#define VK_NV_SHADER_ATOMIC_FLOAT16_VECTOR_EXTENSION_NAME "VK_NV_shader_atomic_float16_vector" +typedef struct VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 shaderFloat16VectorAtomics; +} VkPhysicalDeviceShaderAtomicFloat16VectorFeaturesNV; + + + +// VK_EXT_shader_replicated_composites is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_shader_replicated_composites 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_SPEC_VERSION 1 +#define VK_EXT_SHADER_REPLICATED_COMPOSITES_EXTENSION_NAME "VK_EXT_shader_replicated_composites" +typedef struct VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 shaderReplicatedComposites; +} VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT; + + + +// VK_NV_ray_tracing_validation is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_ray_tracing_validation 1 +#define VK_NV_RAY_TRACING_VALIDATION_SPEC_VERSION 1 +#define VK_NV_RAY_TRACING_VALIDATION_EXTENSION_NAME "VK_NV_ray_tracing_validation" +typedef struct VkPhysicalDeviceRayTracingValidationFeaturesNV { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingValidation; +} VkPhysicalDeviceRayTracingValidationFeaturesNV; + + + +// VK_EXT_device_generated_commands is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_device_generated_commands 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectExecutionSetEXT) +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutEXT) +#define VK_EXT_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 1 +#define VK_EXT_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_EXT_device_generated_commands" + +typedef enum VkIndirectExecutionSetInfoTypeEXT { + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_PIPELINES_EXT = 0, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_SHADER_OBJECTS_EXT = 1, + VK_INDIRECT_EXECUTION_SET_INFO_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectExecutionSetInfoTypeEXT; + +typedef enum VkIndirectCommandsTokenTypeEXT { + VK_INDIRECT_COMMANDS_TOKEN_TYPE_EXECUTION_SET_EXT = 0, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_EXT = 1, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_SEQUENCE_INDEX_EXT = 2, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_EXT = 3, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_EXT = 4, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_EXT = 5, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_EXT = 6, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_COUNT_EXT = 7, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_COUNT_EXT = 8, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_EXT = 9, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV_EXT = 1000202002, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_NV_EXT = 1000202003, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_EXT = 1000328000, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_COUNT_EXT = 1000328001, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_TRACE_RAYS2_EXT = 1000386004, + VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsTokenTypeEXT; + +typedef enum VkIndirectCommandsInputModeFlagBitsEXT { + VK_INDIRECT_COMMANDS_INPUT_MODE_VULKAN_INDEX_BUFFER_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_INPUT_MODE_DXGI_INDEX_BUFFER_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsInputModeFlagBitsEXT; +typedef VkFlags VkIndirectCommandsInputModeFlagsEXT; + +typedef enum VkIndirectCommandsLayoutUsageFlagBitsEXT { + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_EXT = 0x00000001, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_EXT = 0x00000002, + VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkIndirectCommandsLayoutUsageFlagBitsEXT; +typedef VkFlags VkIndirectCommandsLayoutUsageFlagsEXT; +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 deviceGeneratedCommands; + VkBool32 dynamicGeneratedPipelineLayout; +} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesEXT; + +typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxIndirectPipelineCount; + uint32_t maxIndirectShaderObjectCount; + uint32_t maxIndirectSequenceCount; + uint32_t maxIndirectCommandsTokenCount; + uint32_t maxIndirectCommandsTokenOffset; + uint32_t maxIndirectCommandsIndirectStride; + VkIndirectCommandsInputModeFlagsEXT supportedIndirectCommandsInputModes; + 
VkShaderStageFlags supportedIndirectCommandsShaderStages; + VkShaderStageFlags supportedIndirectCommandsShaderStagesPipelineBinding; + VkShaderStageFlags supportedIndirectCommandsShaderStagesShaderBinding; + VkBool32 deviceGeneratedCommandsTransformFeedback; + VkBool32 deviceGeneratedCommandsMultiDrawIndirectCount; +} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesEXT; + +typedef struct VkGeneratedCommandsMemoryRequirementsInfoEXT { + VkStructureType sType; + void* pNext; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + uint32_t maxSequenceCount; + uint32_t maxDrawCount; +} VkGeneratedCommandsMemoryRequirementsInfoEXT; + +typedef struct VkIndirectExecutionSetPipelineInfoEXT { + VkStructureType sType; + const void* pNext; + VkPipeline initialPipeline; + uint32_t maxPipelineCount; +} VkIndirectExecutionSetPipelineInfoEXT; + +typedef struct VkIndirectExecutionSetShaderLayoutInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t setLayoutCount; + const VkDescriptorSetLayout* pSetLayouts; +} VkIndirectExecutionSetShaderLayoutInfoEXT; + +typedef struct VkIndirectExecutionSetShaderInfoEXT { + VkStructureType sType; + const void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pInitialShaders; + const VkIndirectExecutionSetShaderLayoutInfoEXT* pSetLayoutInfos; + uint32_t maxShaderCount; + uint32_t pushConstantRangeCount; + const VkPushConstantRange* pPushConstantRanges; +} VkIndirectExecutionSetShaderInfoEXT; + +typedef union VkIndirectExecutionSetInfoEXT { + const VkIndirectExecutionSetPipelineInfoEXT* pPipelineInfo; + const VkIndirectExecutionSetShaderInfoEXT* pShaderInfo; +} VkIndirectExecutionSetInfoEXT; + +typedef struct VkIndirectExecutionSetCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectExecutionSetInfoTypeEXT type; + VkIndirectExecutionSetInfoEXT info; +} VkIndirectExecutionSetCreateInfoEXT; + +typedef struct VkGeneratedCommandsInfoEXT { + VkStructureType sType; + const void* pNext; + VkShaderStageFlags shaderStages; + VkIndirectExecutionSetEXT indirectExecutionSet; + VkIndirectCommandsLayoutEXT indirectCommandsLayout; + VkDeviceAddress indirectAddress; + VkDeviceSize indirectAddressSize; + VkDeviceAddress preprocessAddress; + VkDeviceSize preprocessSize; + uint32_t maxSequenceCount; + VkDeviceAddress sequenceCountAddress; + uint32_t maxDrawCount; +} VkGeneratedCommandsInfoEXT; + +typedef struct VkWriteIndirectExecutionSetPipelineEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkPipeline pipeline; +} VkWriteIndirectExecutionSetPipelineEXT; + +typedef struct VkIndirectCommandsPushConstantTokenEXT { + VkPushConstantRange updateRange; +} VkIndirectCommandsPushConstantTokenEXT; + +typedef struct VkIndirectCommandsVertexBufferTokenEXT { + uint32_t vertexBindingUnit; +} VkIndirectCommandsVertexBufferTokenEXT; + +typedef struct VkIndirectCommandsIndexBufferTokenEXT { + VkIndirectCommandsInputModeFlagBitsEXT mode; +} VkIndirectCommandsIndexBufferTokenEXT; + +typedef struct VkIndirectCommandsExecutionSetTokenEXT { + VkIndirectExecutionSetInfoTypeEXT type; + VkShaderStageFlags shaderStages; +} VkIndirectCommandsExecutionSetTokenEXT; + +typedef union VkIndirectCommandsTokenDataEXT { + const VkIndirectCommandsPushConstantTokenEXT* pPushConstant; + const VkIndirectCommandsVertexBufferTokenEXT* pVertexBuffer; + const VkIndirectCommandsIndexBufferTokenEXT* pIndexBuffer; + const VkIndirectCommandsExecutionSetTokenEXT* pExecutionSet; +} VkIndirectCommandsTokenDataEXT; + +typedef 
struct VkIndirectCommandsLayoutTokenEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsTokenTypeEXT type; + VkIndirectCommandsTokenDataEXT data; + uint32_t offset; +} VkIndirectCommandsLayoutTokenEXT; + +typedef struct VkIndirectCommandsLayoutCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkIndirectCommandsLayoutUsageFlagsEXT flags; + VkShaderStageFlags shaderStages; + uint32_t indirectStride; + VkPipelineLayout pipelineLayout; + uint32_t tokenCount; + const VkIndirectCommandsLayoutTokenEXT* pTokens; +} VkIndirectCommandsLayoutCreateInfoEXT; + +typedef struct VkDrawIndirectCountIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t stride; + uint32_t commandCount; +} VkDrawIndirectCountIndirectCommandEXT; + +typedef struct VkBindVertexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + uint32_t stride; +} VkBindVertexBufferIndirectCommandEXT; + +typedef struct VkBindIndexBufferIndirectCommandEXT { + VkDeviceAddress bufferAddress; + uint32_t size; + VkIndexType indexType; +} VkBindIndexBufferIndirectCommandEXT; + +typedef struct VkGeneratedCommandsPipelineInfoEXT { + VkStructureType sType; + void* pNext; + VkPipeline pipeline; +} VkGeneratedCommandsPipelineInfoEXT; + +typedef struct VkGeneratedCommandsShaderInfoEXT { + VkStructureType sType; + void* pNext; + uint32_t shaderCount; + const VkShaderEXT* pShaders; +} VkGeneratedCommandsShaderInfoEXT; + +typedef struct VkWriteIndirectExecutionSetShaderEXT { + VkStructureType sType; + const void* pNext; + uint32_t index; + VkShaderEXT shader; +} VkWriteIndirectExecutionSetShaderEXT; + +typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsEXT)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements); +typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer); +typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsEXT)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutEXT)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutEXT)(VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectExecutionSetEXT)(VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet); +typedef void (VKAPI_PTR *PFN_vkDestroyIndirectExecutionSetEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetPipelineEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); +typedef void (VKAPI_PTR *PFN_vkUpdateIndirectExecutionSetShaderEXT)(VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL 
vkGetGeneratedCommandsMemoryRequirementsEXT( + VkDevice device, + const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2* pMemoryRequirements); + +VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer); + +VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + VkBool32 isPreprocessed, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutEXT( + VkDevice device, + const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutEXT( + VkDevice device, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectExecutionSetEXT( + VkDevice device, + const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkIndirectExecutionSetEXT* pIndirectExecutionSet); + +VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectExecutionSetEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetPipelineEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); + +VKAPI_ATTR void VKAPI_CALL vkUpdateIndirectExecutionSetShaderEXT( + VkDevice device, + VkIndirectExecutionSetEXT indirectExecutionSet, + uint32_t executionSetWriteCount, + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); +#endif + + +// VK_MESA_image_alignment_control is a preprocessor guard. Do not pass it to API calls. +#define VK_MESA_image_alignment_control 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_SPEC_VERSION 1 +#define VK_MESA_IMAGE_ALIGNMENT_CONTROL_EXTENSION_NAME "VK_MESA_image_alignment_control" +typedef struct VkPhysicalDeviceImageAlignmentControlFeaturesMESA { + VkStructureType sType; + void* pNext; + VkBool32 imageAlignmentControl; +} VkPhysicalDeviceImageAlignmentControlFeaturesMESA; + +typedef struct VkPhysicalDeviceImageAlignmentControlPropertiesMESA { + VkStructureType sType; + void* pNext; + uint32_t supportedImageAlignmentMask; +} VkPhysicalDeviceImageAlignmentControlPropertiesMESA; + +typedef struct VkImageAlignmentControlCreateInfoMESA { + VkStructureType sType; + const void* pNext; + uint32_t maximumRequestedAlignment; +} VkImageAlignmentControlCreateInfoMESA; + + + +// VK_EXT_depth_clamp_control is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_depth_clamp_control 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_SPEC_VERSION 1 +#define VK_EXT_DEPTH_CLAMP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clamp_control" +typedef struct VkPhysicalDeviceDepthClampControlFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 depthClampControl; +} VkPhysicalDeviceDepthClampControlFeaturesEXT; + +typedef struct VkPipelineViewportDepthClampControlCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDepthClampModeEXT depthClampMode; + const VkDepthClampRangeEXT* pDepthClampRange; +} VkPipelineViewportDepthClampControlCreateInfoEXT; + + + +// VK_KHR_acceleration_structure is a preprocessor guard. 
Do not pass it to API calls. +#define VK_KHR_acceleration_structure 1 +#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13 +#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure" + +typedef enum VkBuildAccelerationStructureModeKHR { + VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1, + VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF +} VkBuildAccelerationStructureModeKHR; + +typedef enum VkAccelerationStructureCreateFlagBitsKHR { + VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001, + VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008, + VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004, + VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF +} VkAccelerationStructureCreateFlagBitsKHR; +typedef VkFlags VkAccelerationStructureCreateFlagsKHR; +typedef struct VkAccelerationStructureBuildRangeInfoKHR { + uint32_t primitiveCount; + uint32_t primitiveOffset; + uint32_t firstVertex; + uint32_t transformOffset; +} VkAccelerationStructureBuildRangeInfoKHR; + +typedef struct VkAccelerationStructureGeometryTrianglesDataKHR { + VkStructureType sType; + const void* pNext; + VkFormat vertexFormat; + VkDeviceOrHostAddressConstKHR vertexData; + VkDeviceSize vertexStride; + uint32_t maxVertex; + VkIndexType indexType; + VkDeviceOrHostAddressConstKHR indexData; + VkDeviceOrHostAddressConstKHR transformData; +} VkAccelerationStructureGeometryTrianglesDataKHR; + +typedef struct VkAccelerationStructureGeometryAabbsDataKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR data; + VkDeviceSize stride; +} VkAccelerationStructureGeometryAabbsDataKHR; + +typedef struct VkAccelerationStructureGeometryInstancesDataKHR { + VkStructureType sType; + const void* pNext; + VkBool32 arrayOfPointers; + VkDeviceOrHostAddressConstKHR data; +} VkAccelerationStructureGeometryInstancesDataKHR; + +typedef union VkAccelerationStructureGeometryDataKHR { + VkAccelerationStructureGeometryTrianglesDataKHR triangles; + VkAccelerationStructureGeometryAabbsDataKHR aabbs; + VkAccelerationStructureGeometryInstancesDataKHR instances; +} VkAccelerationStructureGeometryDataKHR; + +typedef struct VkAccelerationStructureGeometryKHR { + VkStructureType sType; + const void* pNext; + VkGeometryTypeKHR geometryType; + VkAccelerationStructureGeometryDataKHR geometry; + VkGeometryFlagsKHR flags; +} VkAccelerationStructureGeometryKHR; + +typedef struct VkAccelerationStructureBuildGeometryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureTypeKHR type; + VkBuildAccelerationStructureFlagsKHR flags; + VkBuildAccelerationStructureModeKHR mode; + VkAccelerationStructureKHR srcAccelerationStructure; + VkAccelerationStructureKHR dstAccelerationStructure; + uint32_t geometryCount; + const VkAccelerationStructureGeometryKHR* pGeometries; + const VkAccelerationStructureGeometryKHR* const* ppGeometries; + VkDeviceOrHostAddressKHR scratchData; +} VkAccelerationStructureBuildGeometryInfoKHR; + +typedef struct VkAccelerationStructureCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureCreateFlagsKHR createFlags; + VkBuffer buffer; + VkDeviceSize offset; + VkDeviceSize size; + VkAccelerationStructureTypeKHR type; + VkDeviceAddress deviceAddress; +} VkAccelerationStructureCreateInfoKHR; + +typedef struct VkWriteDescriptorSetAccelerationStructureKHR { + VkStructureType sType; + 
const void* pNext; + uint32_t accelerationStructureCount; + const VkAccelerationStructureKHR* pAccelerationStructures; +} VkWriteDescriptorSetAccelerationStructureKHR; + +typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 accelerationStructure; + VkBool32 accelerationStructureCaptureReplay; + VkBool32 accelerationStructureIndirectBuild; + VkBool32 accelerationStructureHostCommands; + VkBool32 descriptorBindingAccelerationStructureUpdateAfterBind; +} VkPhysicalDeviceAccelerationStructureFeaturesKHR; + +typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR { + VkStructureType sType; + void* pNext; + uint64_t maxGeometryCount; + uint64_t maxInstanceCount; + uint64_t maxPrimitiveCount; + uint32_t maxPerStageDescriptorAccelerationStructures; + uint32_t maxPerStageDescriptorUpdateAfterBindAccelerationStructures; + uint32_t maxDescriptorSetAccelerationStructures; + uint32_t maxDescriptorSetUpdateAfterBindAccelerationStructures; + uint32_t minAccelerationStructureScratchOffsetAlignment; +} VkPhysicalDeviceAccelerationStructurePropertiesKHR; + +typedef struct VkAccelerationStructureDeviceAddressInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR accelerationStructure; +} VkAccelerationStructureDeviceAddressInfoKHR; + +typedef struct VkAccelerationStructureVersionInfoKHR { + VkStructureType sType; + const void* pNext; + const uint8_t* pVersionData; +} VkAccelerationStructureVersionInfoKHR; + +typedef struct VkCopyAccelerationStructureToMemoryInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkDeviceOrHostAddressKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureToMemoryInfoKHR; + +typedef struct VkCopyMemoryToAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceOrHostAddressConstKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyMemoryToAccelerationStructureInfoKHR; + +typedef struct VkCopyAccelerationStructureInfoKHR { + VkStructureType sType; + const void* pNext; + VkAccelerationStructureKHR src; + VkAccelerationStructureKHR dst; + VkCopyAccelerationStructureModeKHR mode; +} VkCopyAccelerationStructureInfoKHR; + +typedef struct VkAccelerationStructureBuildSizesInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceSize accelerationStructureSize; + VkDeviceSize updateScratchSize; + VkDeviceSize buildScratchSize; +} VkAccelerationStructureBuildSizesInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure); +typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts); +typedef 
VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); +typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); +typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery); +typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility); +typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR( + VkDevice device, + const VkAccelerationStructureCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkAccelerationStructureKHR* pAccelerationStructure); + +VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR( + VkDevice device, + VkAccelerationStructureKHR accelerationStructure, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR( + VkCommandBuffer commandBuffer, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkDeviceAddress* pIndirectDeviceAddresses, + const uint32_t* pIndirectStrides, + const uint32_t* const* ppMaxPrimitiveCounts); + +VKAPI_ATTR VkResult 
VKAPI_CALL vkBuildAccelerationStructuresKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + uint32_t infoCount, + const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR( + VkDevice device, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + size_t dataSize, + void* pData, + size_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR( + VkCommandBuffer commandBuffer, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + +VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR( + VkDevice device, + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); + +VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR( + VkCommandBuffer commandBuffer, + uint32_t accelerationStructureCount, + const VkAccelerationStructureKHR* pAccelerationStructures, + VkQueryType queryType, + VkQueryPool queryPool, + uint32_t firstQuery); + +VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR( + VkDevice device, + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); + +VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR( + VkDevice device, + VkAccelerationStructureBuildTypeKHR buildType, + const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, + const uint32_t* pMaxPrimitiveCounts, + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); +#endif + + +// VK_KHR_ray_tracing_pipeline is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_ray_tracing_pipeline 1 +#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1 +#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline" + +typedef enum VkShaderGroupShaderKHR { + VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0, + VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1, + VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2, + VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3, + VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF +} VkShaderGroupShaderKHR; +typedef struct VkRayTracingShaderGroupCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkRayTracingShaderGroupTypeKHR type; + uint32_t generalShader; + uint32_t closestHitShader; + uint32_t anyHitShader; + uint32_t intersectionShader; + const void* pShaderGroupCaptureReplayHandle; +} VkRayTracingShaderGroupCreateInfoKHR; + +typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t maxPipelineRayPayloadSize; + uint32_t maxPipelineRayHitAttributeSize; +} VkRayTracingPipelineInterfaceCreateInfoKHR; + +typedef struct VkRayTracingPipelineCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkPipelineCreateFlags flags; + uint32_t stageCount; + const VkPipelineShaderStageCreateInfo* pStages; + uint32_t groupCount; + const VkRayTracingShaderGroupCreateInfoKHR* pGroups; + uint32_t maxPipelineRayRecursionDepth; + const VkPipelineLibraryCreateInfoKHR* pLibraryInfo; + const VkRayTracingPipelineInterfaceCreateInfoKHR* pLibraryInterface; + const VkPipelineDynamicStateCreateInfo* pDynamicState; + VkPipelineLayout layout; + VkPipeline basePipelineHandle; + int32_t basePipelineIndex; +} VkRayTracingPipelineCreateInfoKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayTracingPipeline; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplay; + VkBool32 rayTracingPipelineShaderGroupHandleCaptureReplayMixed; + VkBool32 rayTracingPipelineTraceRaysIndirect; + VkBool32 rayTraversalPrimitiveCulling; +} VkPhysicalDeviceRayTracingPipelineFeaturesKHR; + +typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t shaderGroupHandleSize; + uint32_t maxRayRecursionDepth; + uint32_t maxShaderGroupStride; + uint32_t shaderGroupBaseAlignment; + uint32_t shaderGroupHandleCaptureReplaySize; + uint32_t maxRayDispatchInvocationCount; + uint32_t shaderGroupHandleAlignment; + uint32_t maxRayHitAttributeSize; +} VkPhysicalDeviceRayTracingPipelinePropertiesKHR; + +typedef struct VkStridedDeviceAddressRegionKHR { + VkDeviceAddress deviceAddress; + VkDeviceSize stride; + VkDeviceSize size; +} VkStridedDeviceAddressRegionKHR; + +typedef struct VkTraceRaysIndirectCommandKHR { + uint32_t width; + uint32_t height; + uint32_t depth; +} VkTraceRaysIndirectCommandKHR; + +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth); +typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines); +typedef 
VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData); +typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress); +typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader); +typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + uint32_t width, + uint32_t height, + uint32_t depth); + +VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, + const VkAllocationCallbacks* pAllocator, + VkPipeline* pPipelines); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t firstGroup, + uint32_t groupCount, + size_t dataSize, + void* pData); + +VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR( + VkCommandBuffer commandBuffer, + const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, + const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, + VkDeviceAddress indirectDeviceAddress); + +VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR( + VkDevice device, + VkPipeline pipeline, + uint32_t group, + VkShaderGroupShaderKHR groupShader); + +VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR( + VkCommandBuffer commandBuffer, + uint32_t pipelineStackSize); +#endif + + +// VK_KHR_ray_query is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_ray_query 1 +#define VK_KHR_RAY_QUERY_SPEC_VERSION 1 +#define VK_KHR_RAY_QUERY_EXTENSION_NAME "VK_KHR_ray_query" +typedef struct VkPhysicalDeviceRayQueryFeaturesKHR { + VkStructureType sType; + void* pNext; + VkBool32 rayQuery; +} VkPhysicalDeviceRayQueryFeaturesKHR; + + + +// VK_EXT_mesh_shader is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_mesh_shader 1 +#define VK_EXT_MESH_SHADER_SPEC_VERSION 1 +#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader" +typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT { + VkStructureType sType; + void* pNext; + VkBool32 taskShader; + VkBool32 meshShader; + VkBool32 multiviewMeshShader; + VkBool32 primitiveFragmentShadingRateMeshShader; + VkBool32 meshShaderQueries; +} VkPhysicalDeviceMeshShaderFeaturesEXT; + +typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT { + VkStructureType sType; + void* pNext; + uint32_t maxTaskWorkGroupTotalCount; + uint32_t maxTaskWorkGroupCount[3]; + uint32_t maxTaskWorkGroupInvocations; + uint32_t maxTaskWorkGroupSize[3]; + uint32_t maxTaskPayloadSize; + uint32_t maxTaskSharedMemorySize; + uint32_t maxTaskPayloadAndSharedMemorySize; + uint32_t maxMeshWorkGroupTotalCount; + uint32_t maxMeshWorkGroupCount[3]; + uint32_t maxMeshWorkGroupInvocations; + uint32_t maxMeshWorkGroupSize[3]; + uint32_t maxMeshSharedMemorySize; + uint32_t maxMeshPayloadAndSharedMemorySize; + uint32_t maxMeshOutputMemorySize; + uint32_t maxMeshPayloadAndOutputMemorySize; + uint32_t maxMeshOutputComponents; + uint32_t maxMeshOutputVertices; + uint32_t maxMeshOutputPrimitives; + uint32_t maxMeshOutputLayers; + uint32_t maxMeshMultiviewViewCount; + uint32_t meshOutputPerVertexGranularity; + uint32_t meshOutputPerPrimitiveGranularity; + uint32_t maxPreferredTaskWorkGroupInvocations; + uint32_t maxPreferredMeshWorkGroupInvocations; + VkBool32 prefersLocalInvocationVertexOutput; + VkBool32 prefersLocalInvocationPrimitiveOutput; + VkBool32 prefersCompactVertexOutput; + VkBool32 prefersCompactPrimitiveOutput; +} VkPhysicalDeviceMeshShaderPropertiesEXT; + +typedef struct VkDrawMeshTasksIndirectCommandEXT { + uint32_t groupCountX; + uint32_t groupCountY; + uint32_t groupCountZ; +} VkDrawMeshTasksIndirectCommandEXT; + +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); +typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT( + VkCommandBuffer commandBuffer, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + uint32_t drawCount, + uint32_t stride); + +VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset, + VkBuffer countBuffer, + VkDeviceSize countBufferOffset, + uint32_t maxDrawCount, + uint32_t stride); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_directfb.h b/dependencies/vulkan/include/vulkan/vulkan_directfb.h new file mode 100644 index 000000000000..f06f80b70e37 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_directfb.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_DIRECTFB_H_ +#define VULKAN_DIRECTFB_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. 
+** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_directfb_surface 1 +#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1 +#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface" +typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT; +typedef struct VkDirectFBSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkDirectFBSurfaceCreateFlagsEXT flags; + IDirectFB* dfb; + IDirectFBSurface* surface; +} VkDirectFBSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT( + VkInstance instance, + const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + IDirectFB* dfb); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_fuchsia.h b/dependencies/vulkan/include/vulkan/vulkan_fuchsia.h new file mode 100644 index 000000000000..f60907d10b43 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_fuchsia.h @@ -0,0 +1,262 @@ +#ifndef VULKAN_FUCHSIA_H_ +#define VULKAN_FUCHSIA_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_imagepipe_surface 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1 +#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface" +typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA; +typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImagePipeSurfaceCreateFlagsFUCHSIA flags; + zx_handle_t imagePipeHandle; +} VkImagePipeSurfaceCreateInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA( + VkInstance instance, + const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls. 
+#define VK_FUCHSIA_external_memory 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory" +typedef struct VkImportMemoryZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + zx_handle_t handle; +} VkImportMemoryZirconHandleInfoFUCHSIA; + +typedef struct VkMemoryZirconHandlePropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryZirconHandlePropertiesFUCHSIA; + +typedef struct VkMemoryGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA( + VkDevice device, + const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + zx_handle_t zirconHandle, + VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties); +#endif + + +// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls. +#define VK_FUCHSIA_external_semaphore 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 +#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore" +typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + zx_handle_t zirconHandle; +} VkImportSemaphoreZirconHandleInfoFUCHSIA; + +typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetZirconHandleInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA( + VkDevice device, + const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, + zx_handle_t* pZirconHandle); +#endif + + +// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls. 
+#define VK_FUCHSIA_buffer_collection 1 +VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA) +#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2 +#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection" +typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA; + +typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA { + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001, + VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004, + VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008, + VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010, + VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF +} VkImageConstraintsInfoFlagBitsFUCHSIA; +typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA; +typedef struct VkBufferCollectionCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + zx_handle_t collectionToken; +} VkBufferCollectionCreateInfoFUCHSIA; + +typedef struct VkImportMemoryBufferCollectionFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkImportMemoryBufferCollectionFUCHSIA; + +typedef struct VkBufferCollectionImageCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionImageCreateInfoFUCHSIA; + +typedef struct VkBufferCollectionConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t minBufferCount; + uint32_t maxBufferCount; + uint32_t minBufferCountForCamping; + uint32_t minBufferCountForDedicatedSlack; + uint32_t minBufferCountForSharedSlack; +} VkBufferCollectionConstraintsInfoFUCHSIA; + +typedef struct VkBufferConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCreateInfo createInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; +} VkBufferConstraintsInfoFUCHSIA; + +typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkBufferCollectionFUCHSIA collection; + uint32_t index; +} VkBufferCollectionBufferCreateInfoFUCHSIA; + +typedef struct VkSysmemColorSpaceFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t colorSpace; +} VkSysmemColorSpaceFUCHSIA; + +typedef struct VkBufferCollectionPropertiesFUCHSIA { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; + uint32_t bufferCount; + uint32_t createInfoIndex; + uint64_t sysmemPixelFormat; + VkFormatFeatureFlags formatFeatures; + VkSysmemColorSpaceFUCHSIA sysmemColorSpaceIndex; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkBufferCollectionPropertiesFUCHSIA; + +typedef struct VkImageFormatConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + VkImageCreateInfo imageCreateInfo; + VkFormatFeatureFlags requiredFormatFeatures; + VkImageFormatConstraintsFlagsFUCHSIA flags; + uint64_t sysmemPixelFormat; + uint32_t colorSpaceCount; + const VkSysmemColorSpaceFUCHSIA* pColorSpaces; +} VkImageFormatConstraintsInfoFUCHSIA; + +typedef struct VkImageConstraintsInfoFUCHSIA { + VkStructureType sType; + const void* pNext; + uint32_t formatConstraintsCount; + const VkImageFormatConstraintsInfoFUCHSIA* pFormatConstraints; + 
VkBufferCollectionConstraintsInfoFUCHSIA bufferCollectionConstraints; + VkImageConstraintsInfoFlagsFUCHSIA flags; +} VkImageConstraintsInfoFUCHSIA; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); +typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); +typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator); +typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA( + VkDevice device, + const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkBufferCollectionFUCHSIA* pCollection); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo); + +VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + const VkAllocationCallbacks* pAllocator); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA( + VkDevice device, + VkBufferCollectionFUCHSIA collection, + VkBufferCollectionPropertiesFUCHSIA* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_ggp.h b/dependencies/vulkan/include/vulkan/vulkan_ggp.h new file mode 100644 index 000000000000..0a8863a14b14 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_ggp.h @@ -0,0 +1,60 @@ +#ifndef VULKAN_GGP_H_ +#define VULKAN_GGP_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_GGP_stream_descriptor_surface 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1 +#define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface" +typedef VkFlags VkStreamDescriptorSurfaceCreateFlagsGGP; +typedef struct VkStreamDescriptorSurfaceCreateInfoGGP { + VkStructureType sType; + const void* pNext; + VkStreamDescriptorSurfaceCreateFlagsGGP flags; + GgpStreamDescriptor streamDescriptor; +} VkStreamDescriptorSurfaceCreateInfoGGP; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateStreamDescriptorSurfaceGGP)(VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP( + VkInstance instance, + const VkStreamDescriptorSurfaceCreateInfoGGP* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls. +#define VK_GGP_frame_token 1 +#define VK_GGP_FRAME_TOKEN_SPEC_VERSION 1 +#define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token" +typedef struct VkPresentFrameTokenGGP { + VkStructureType sType; + const void* pNext; + GgpFrameToken frameToken; +} VkPresentFrameTokenGGP; + + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_ios.h b/dependencies/vulkan/include/vulkan/vulkan_ios.h new file mode 100644 index 000000000000..22ed2c039a6b --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_ios.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_IOS_H_ +#define VULKAN_IOS_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_MVK_ios_surface 1 +#define VK_MVK_IOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" +typedef VkFlags VkIOSSurfaceCreateFlagsMVK; +typedef struct VkIOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkIOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkIOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( + VkInstance instance, + const VkIOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_macos.h b/dependencies/vulkan/include/vulkan/vulkan_macos.h new file mode 100644 index 000000000000..a7f5613a0598 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_macos.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_MACOS_H_ +#define VULKAN_MACOS_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_MVK_macos_surface 1 +#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3 +#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" +typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; +typedef struct VkMacOSSurfaceCreateInfoMVK { + VkStructureType sType; + const void* pNext; + VkMacOSSurfaceCreateFlagsMVK flags; + const void* pView; +} VkMacOSSurfaceCreateInfoMVK; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( + VkInstance instance, + const VkMacOSSurfaceCreateInfoMVK* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_metal.h b/dependencies/vulkan/include/vulkan/vulkan_metal.h new file mode 100644 index 000000000000..89a557490647 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_metal.h @@ -0,0 +1,195 @@ +#ifndef VULKAN_METAL_H_ +#define VULKAN_METAL_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_metal_surface 1 +#ifdef __OBJC__ +@class CAMetalLayer; +#else +typedef void CAMetalLayer; +#endif + +#define VK_EXT_METAL_SURFACE_SPEC_VERSION 1 +#define VK_EXT_METAL_SURFACE_EXTENSION_NAME "VK_EXT_metal_surface" +typedef VkFlags VkMetalSurfaceCreateFlagsEXT; +typedef struct VkMetalSurfaceCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkMetalSurfaceCreateFlagsEXT flags; + const CAMetalLayer* pLayer; +} VkMetalSurfaceCreateInfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateMetalSurfaceEXT)(VkInstance instance, const VkMetalSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT( + VkInstance instance, + const VkMetalSurfaceCreateInfoEXT* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + + +// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls. 
+#define VK_EXT_metal_objects 1 +#ifdef __OBJC__ +@protocol MTLDevice; +typedef __unsafe_unretained id MTLDevice_id; +#else +typedef void* MTLDevice_id; +#endif + +#ifdef __OBJC__ +@protocol MTLCommandQueue; +typedef __unsafe_unretained id MTLCommandQueue_id; +#else +typedef void* MTLCommandQueue_id; +#endif + +#ifdef __OBJC__ +@protocol MTLBuffer; +typedef __unsafe_unretained id MTLBuffer_id; +#else +typedef void* MTLBuffer_id; +#endif + +#ifdef __OBJC__ +@protocol MTLTexture; +typedef __unsafe_unretained id MTLTexture_id; +#else +typedef void* MTLTexture_id; +#endif + +typedef struct __IOSurface* IOSurfaceRef; +#ifdef __OBJC__ +@protocol MTLSharedEvent; +typedef __unsafe_unretained id MTLSharedEvent_id; +#else +typedef void* MTLSharedEvent_id; +#endif + +#define VK_EXT_METAL_OBJECTS_SPEC_VERSION 2 +#define VK_EXT_METAL_OBJECTS_EXTENSION_NAME "VK_EXT_metal_objects" + +typedef enum VkExportMetalObjectTypeFlagBitsEXT { + VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT = 0x00000001, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT = 0x00000002, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT = 0x00000004, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT = 0x00000008, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT = 0x00000010, + VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT = 0x00000020, + VK_EXPORT_METAL_OBJECT_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF +} VkExportMetalObjectTypeFlagBitsEXT; +typedef VkFlags VkExportMetalObjectTypeFlagsEXT; +typedef struct VkExportMetalObjectCreateInfoEXT { + VkStructureType sType; + const void* pNext; + VkExportMetalObjectTypeFlagBitsEXT exportObjectType; +} VkExportMetalObjectCreateInfoEXT; + +typedef struct VkExportMetalObjectsInfoEXT { + VkStructureType sType; + const void* pNext; +} VkExportMetalObjectsInfoEXT; + +typedef struct VkExportMetalDeviceInfoEXT { + VkStructureType sType; + const void* pNext; + MTLDevice_id mtlDevice; +} VkExportMetalDeviceInfoEXT; + +typedef struct VkExportMetalCommandQueueInfoEXT { + VkStructureType sType; + const void* pNext; + VkQueue queue; + MTLCommandQueue_id mtlCommandQueue; +} VkExportMetalCommandQueueInfoEXT; + +typedef struct VkExportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + MTLBuffer_id mtlBuffer; +} VkExportMetalBufferInfoEXT; + +typedef struct VkImportMetalBufferInfoEXT { + VkStructureType sType; + const void* pNext; + MTLBuffer_id mtlBuffer; +} VkImportMetalBufferInfoEXT; + +typedef struct VkExportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + VkImageView imageView; + VkBufferView bufferView; + VkImageAspectFlagBits plane; + MTLTexture_id mtlTexture; +} VkExportMetalTextureInfoEXT; + +typedef struct VkImportMetalTextureInfoEXT { + VkStructureType sType; + const void* pNext; + VkImageAspectFlagBits plane; + MTLTexture_id mtlTexture; +} VkImportMetalTextureInfoEXT; + +typedef struct VkExportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + VkImage image; + IOSurfaceRef ioSurface; +} VkExportMetalIOSurfaceInfoEXT; + +typedef struct VkImportMetalIOSurfaceInfoEXT { + VkStructureType sType; + const void* pNext; + IOSurfaceRef ioSurface; +} VkImportMetalIOSurfaceInfoEXT; + +typedef struct VkExportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkEvent event; + MTLSharedEvent_id mtlSharedEvent; +} VkExportMetalSharedEventInfoEXT; + +typedef struct VkImportMetalSharedEventInfoEXT { + VkStructureType sType; + const void* pNext; + 
MTLSharedEvent_id mtlSharedEvent; +} VkImportMetalSharedEventInfoEXT; + +typedef void (VKAPI_PTR *PFN_vkExportMetalObjectsEXT)(VkDevice device, VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR void VKAPI_CALL vkExportMetalObjectsEXT( + VkDevice device, + VkExportMetalObjectsInfoEXT* pMetalObjectsInfo); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_screen.h b/dependencies/vulkan/include/vulkan/vulkan_screen.h new file mode 100644 index 000000000000..7e84d4d9656f --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_screen.h @@ -0,0 +1,108 @@ +#ifndef VULKAN_SCREEN_H_ +#define VULKAN_SCREEN_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_QNX_screen_surface 1 +#define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1 +#define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface" +typedef VkFlags VkScreenSurfaceCreateFlagsQNX; +typedef struct VkScreenSurfaceCreateInfoQNX { + VkStructureType sType; + const void* pNext; + VkScreenSurfaceCreateFlagsQNX flags; + struct _screen_context* context; + struct _screen_window* window; +} VkScreenSurfaceCreateInfoQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateScreenSurfaceQNX)(VkInstance instance, const VkScreenSurfaceCreateInfoQNX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window* window); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateScreenSurfaceQNX( + VkInstance instance, + const VkScreenSurfaceCreateInfoQNX* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct _screen_window* window); +#endif + + +// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls. 
+#define VK_QNX_external_memory_screen_buffer 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1 +#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer" +typedef struct VkScreenBufferPropertiesQNX { + VkStructureType sType; + void* pNext; + VkDeviceSize allocationSize; + uint32_t memoryTypeBits; +} VkScreenBufferPropertiesQNX; + +typedef struct VkScreenBufferFormatPropertiesQNX { + VkStructureType sType; + void* pNext; + VkFormat format; + uint64_t externalFormat; + uint64_t screenUsage; + VkFormatFeatureFlags formatFeatures; + VkComponentMapping samplerYcbcrConversionComponents; + VkSamplerYcbcrModelConversion suggestedYcbcrModel; + VkSamplerYcbcrRange suggestedYcbcrRange; + VkChromaLocation suggestedXChromaOffset; + VkChromaLocation suggestedYChromaOffset; +} VkScreenBufferFormatPropertiesQNX; + +typedef struct VkImportScreenBufferInfoQNX { + VkStructureType sType; + const void* pNext; + struct _screen_buffer* buffer; +} VkImportScreenBufferInfoQNX; + +typedef struct VkExternalFormatQNX { + VkStructureType sType; + void* pNext; + uint64_t externalFormat; +} VkExternalFormatQNX; + +typedef struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX { + VkStructureType sType; + void* pNext; + VkBool32 screenBufferImport; +} VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX; + +typedef VkResult (VKAPI_PTR *PFN_vkGetScreenBufferPropertiesQNX)(VkDevice device, const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetScreenBufferPropertiesQNX( + VkDevice device, + const struct _screen_buffer* buffer, + VkScreenBufferPropertiesQNX* pProperties); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_vi.h b/dependencies/vulkan/include/vulkan/vulkan_vi.h new file mode 100644 index 000000000000..c145f4a800fa --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_vi.h @@ -0,0 +1,48 @@ +#ifndef VULKAN_VI_H_ +#define VULKAN_VI_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_NN_vi_surface 1 +#define VK_NN_VI_SURFACE_SPEC_VERSION 1 +#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" +typedef VkFlags VkViSurfaceCreateFlagsNN; +typedef struct VkViSurfaceCreateInfoNN { + VkStructureType sType; + const void* pNext; + VkViSurfaceCreateFlagsNN flags; + void* window; +} VkViSurfaceCreateInfoNN; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( + VkInstance instance, + const VkViSurfaceCreateInfoNN* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_wayland.h b/dependencies/vulkan/include/vulkan/vulkan_wayland.h new file mode 100644 index 000000000000..ec706a114b51 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_wayland.h @@ -0,0 +1,55 @@ +#ifndef VULKAN_WAYLAND_H_ +#define VULKAN_WAYLAND_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. 
+** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_wayland_surface 1 +#define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface" +typedef VkFlags VkWaylandSurfaceCreateFlagsKHR; +typedef struct VkWaylandSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWaylandSurfaceCreateFlagsKHR flags; + struct wl_display* display; + struct wl_surface* surface; +} VkWaylandSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWaylandSurfaceKHR)(VkInstance instance, const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display* display); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWaylandSurfaceKHR( + VkInstance instance, + const VkWaylandSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWaylandPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + struct wl_display* display); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_win32.h b/dependencies/vulkan/include/vulkan/vulkan_win32.h new file mode 100644 index 000000000000..d7a0b2bab4f1 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_win32.h @@ -0,0 +1,342 @@ +#ifndef VULKAN_WIN32_H_ +#define VULKAN_WIN32_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_win32_surface 1 +#define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6 +#define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface" +typedef VkFlags VkWin32SurfaceCreateFlagsKHR; +typedef struct VkWin32SurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkWin32SurfaceCreateFlagsKHR flags; + HINSTANCE hinstance; + HWND hwnd; +} VkWin32SurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateWin32SurfaceKHR)(VkInstance instance, const VkWin32SurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateWin32SurfaceKHR( + VkInstance instance, + const VkWin32SurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex); +#endif + + +// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_memory_win32 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportMemoryWin32HandleInfoKHR; + +typedef struct VkExportMemoryWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportMemoryWin32HandleInfoKHR; + +typedef struct VkMemoryWin32HandlePropertiesKHR { + VkStructureType sType; + void* pNext; + uint32_t memoryTypeBits; +} VkMemoryWin32HandlePropertiesKHR; + +typedef struct VkMemoryGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkDeviceMemory memory; + VkExternalMemoryHandleTypeFlagBits handleType; +} VkMemoryGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleKHR)(VkDevice device, const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandlePropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleKHR( + VkDevice device, + const VkMemoryGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR( + VkDevice device, + VkExternalMemoryHandleTypeFlagBits handleType, + HANDLE handle, + VkMemoryWin32HandlePropertiesKHR* pMemoryWin32HandleProperties); +#endif + + +// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_win32_keyed_mutex 1 +#define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1 +#define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeouts; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoKHR; + + + +// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_external_semaphore_win32 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32" +typedef struct VkImportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkSemaphoreImportFlags flags; + VkExternalSemaphoreHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportSemaphoreWin32HandleInfoKHR; + +typedef struct VkExportSemaphoreWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportSemaphoreWin32HandleInfoKHR; + +typedef struct VkD3D12FenceSubmitInfoKHR { + VkStructureType sType; + const void* pNext; + uint32_t waitSemaphoreValuesCount; + const uint64_t* pWaitSemaphoreValues; + uint32_t signalSemaphoreValuesCount; + const uint64_t* pSignalSemaphoreValues; +} VkD3D12FenceSubmitInfoKHR; + +typedef struct VkSemaphoreGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkSemaphore semaphore; + VkExternalSemaphoreHandleTypeFlagBits handleType; +} VkSemaphoreGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreWin32HandleKHR)(VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreWin32HandleKHR)(VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreWin32HandleKHR( + VkDevice device, + const VkImportSemaphoreWin32HandleInfoKHR* pImportSemaphoreWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR( + VkDevice device, + const VkSemaphoreGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_external_fence_win32 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1 +#define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32" +typedef struct VkImportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkFenceImportFlags flags; + VkExternalFenceHandleTypeFlagBits handleType; + HANDLE handle; + LPCWSTR name; +} VkImportFenceWin32HandleInfoKHR; + +typedef struct VkExportFenceWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; + LPCWSTR name; +} VkExportFenceWin32HandleInfoKHR; + +typedef struct VkFenceGetWin32HandleInfoKHR { + VkStructureType sType; + const void* pNext; + VkFence fence; + VkExternalFenceHandleTypeFlagBits handleType; +} VkFenceGetWin32HandleInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkImportFenceWin32HandleKHR)(VkDevice device, const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); +typedef VkResult (VKAPI_PTR *PFN_vkGetFenceWin32HandleKHR)(VkDevice device, const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceWin32HandleKHR( + VkDevice device, + const VkImportFenceWin32HandleInfoKHR* pImportFenceWin32HandleInfo); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR( + VkDevice device, + const VkFenceGetWin32HandleInfoKHR* pGetWin32HandleInfo, + HANDLE* pHandle); +#endif + + +// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls. 
+#define VK_NV_external_memory_win32 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1 +#define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32" +typedef struct VkImportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + VkExternalMemoryHandleTypeFlagsNV handleType; + HANDLE handle; +} VkImportMemoryWin32HandleInfoNV; + +typedef struct VkExportMemoryWin32HandleInfoNV { + VkStructureType sType; + const void* pNext; + const SECURITY_ATTRIBUTES* pAttributes; + DWORD dwAccess; +} VkExportMemoryWin32HandleInfoNV; + +typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryWin32HandleNV)(VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE* pHandle); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV( + VkDevice device, + VkDeviceMemory memory, + VkExternalMemoryHandleTypeFlagsNV handleType, + HANDLE* pHandle); +#endif + + +// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_win32_keyed_mutex 1 +#define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2 +#define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex" +typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV { + VkStructureType sType; + const void* pNext; + uint32_t acquireCount; + const VkDeviceMemory* pAcquireSyncs; + const uint64_t* pAcquireKeys; + const uint32_t* pAcquireTimeoutMilliseconds; + uint32_t releaseCount; + const VkDeviceMemory* pReleaseSyncs; + const uint64_t* pReleaseKeys; +} VkWin32KeyedMutexAcquireReleaseInfoNV; + + + +// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_full_screen_exclusive 1 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4 +#define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive" + +typedef enum VkFullScreenExclusiveEXT { + VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT = 0, + VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT = 1, + VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT = 2, + VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT = 3, + VK_FULL_SCREEN_EXCLUSIVE_MAX_ENUM_EXT = 0x7FFFFFFF +} VkFullScreenExclusiveEXT; +typedef struct VkSurfaceFullScreenExclusiveInfoEXT { + VkStructureType sType; + void* pNext; + VkFullScreenExclusiveEXT fullScreenExclusive; +} VkSurfaceFullScreenExclusiveInfoEXT; + +typedef struct VkSurfaceCapabilitiesFullScreenExclusiveEXT { + VkStructureType sType; + void* pNext; + VkBool32 fullScreenExclusiveSupported; +} VkSurfaceCapabilitiesFullScreenExclusiveEXT; + +typedef struct VkSurfaceFullScreenExclusiveWin32InfoEXT { + VkStructureType sType; + const void* pNext; + HMONITOR hmonitor; +} VkSurfaceFullScreenExclusiveWin32InfoEXT; + +typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes); +typedef VkResult (VKAPI_PTR *PFN_vkAcquireFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkReleaseFullScreenExclusiveModeEXT)(VkDevice device, VkSwapchainKHR swapchain); +typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModes2EXT)(VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR* pModes); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModes2EXT( + VkPhysicalDevice physicalDevice, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + uint32_t* 
pPresentModeCount, + VkPresentModeKHR* pPresentModes); + +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkReleaseFullScreenExclusiveModeEXT( + VkDevice device, + VkSwapchainKHR swapchain); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT( + VkDevice device, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkDeviceGroupPresentModeFlagsKHR* pModes); +#endif + + +// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls. +#define VK_NV_acquire_winrt_display 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1 +#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV( + VkPhysicalDevice physicalDevice, + uint32_t deviceRelativeId, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_xcb.h b/dependencies/vulkan/include/vulkan/vulkan_xcb.h new file mode 100644 index 000000000000..cdf6b5269f14 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_xcb.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XCB_H_ +#define VULKAN_XCB_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls. 
+#define VK_KHR_xcb_surface 1 +#define VK_KHR_XCB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface" +typedef VkFlags VkXcbSurfaceCreateFlagsKHR; +typedef struct VkXcbSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXcbSurfaceCreateFlagsKHR flags; + xcb_connection_t* connection; + xcb_window_t window; +} VkXcbSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXcbSurfaceKHR)(VkInstance instance, const VkXcbSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t* connection, xcb_visualid_t visual_id); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXcbSurfaceKHR( + VkInstance instance, + const VkXcbSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXcbPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + xcb_connection_t* connection, + xcb_visualid_t visual_id); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_xlib.h b/dependencies/vulkan/include/vulkan/vulkan_xlib.h new file mode 100644 index 000000000000..b3c3e27d7775 --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_xlib.h @@ -0,0 +1,56 @@ +#ifndef VULKAN_XLIB_H_ +#define VULKAN_XLIB_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. +** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls. +#define VK_KHR_xlib_surface 1 +#define VK_KHR_XLIB_SURFACE_SPEC_VERSION 6 +#define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface" +typedef VkFlags VkXlibSurfaceCreateFlagsKHR; +typedef struct VkXlibSurfaceCreateInfoKHR { + VkStructureType sType; + const void* pNext; + VkXlibSurfaceCreateFlagsKHR flags; + Display* dpy; + Window window; +} VkXlibSurfaceCreateInfoKHR; + +typedef VkResult (VKAPI_PTR *PFN_vkCreateXlibSurfaceKHR)(VkInstance instance, const VkXlibSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface); +typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display* dpy, VisualID visualID); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkCreateXlibSurfaceKHR( + VkInstance instance, + const VkXlibSurfaceCreateInfoKHR* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkSurfaceKHR* pSurface); + +VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceXlibPresentationSupportKHR( + VkPhysicalDevice physicalDevice, + uint32_t queueFamilyIndex, + Display* dpy, + VisualID visualID); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/include/vulkan/vulkan_xlib_xrandr.h b/dependencies/vulkan/include/vulkan/vulkan_xlib_xrandr.h new file mode 100644 index 000000000000..8e99190b4ffe --- /dev/null +++ b/dependencies/vulkan/include/vulkan/vulkan_xlib_xrandr.h @@ -0,0 +1,46 @@ +#ifndef VULKAN_XLIB_XRANDR_H_ +#define VULKAN_XLIB_XRANDR_H_ 1 + +/* +** Copyright 2015-2024 The Khronos Group Inc. 
+** +** SPDX-License-Identifier: Apache-2.0 +*/ + +/* +** This header is generated from the Khronos Vulkan XML API Registry. +** +*/ + + +#ifdef __cplusplus +extern "C" { +#endif + + + +// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls. +#define VK_EXT_acquire_xlib_display 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1 +#define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display" +typedef VkResult (VKAPI_PTR *PFN_vkAcquireXlibDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, VkDisplayKHR display); +typedef VkResult (VKAPI_PTR *PFN_vkGetRandROutputDisplayEXT)(VkPhysicalDevice physicalDevice, Display* dpy, RROutput rrOutput, VkDisplayKHR* pDisplay); + +#ifndef VK_NO_PROTOTYPES +VKAPI_ATTR VkResult VKAPI_CALL vkAcquireXlibDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + VkDisplayKHR display); + +VKAPI_ATTR VkResult VKAPI_CALL vkGetRandROutputDisplayEXT( + VkPhysicalDevice physicalDevice, + Display* dpy, + RROutput rrOutput, + VkDisplayKHR* pDisplay); +#endif + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfig.cmake b/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfig.cmake new file mode 100644 index 000000000000..09776f349851 --- /dev/null +++ b/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfig.cmake @@ -0,0 +1,106 @@ +# Generated by CMake + +if("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.8) + message(FATAL_ERROR "CMake >= 2.8.0 required") +endif() +if(CMAKE_VERSION VERSION_LESS "3.0.0") + message(FATAL_ERROR "CMake >= 3.0.0 required") +endif() +cmake_policy(PUSH) +cmake_policy(VERSION 3.0.0...3.28) +#---------------------------------------------------------------- +# Generated CMake target import file. +#---------------------------------------------------------------- + +# Commands may need to know the format version. +set(CMAKE_IMPORT_FILE_VERSION 1) + +# Protect against multiple inclusion, which would fail when already imported targets are added once more. +set(_cmake_targets_defined "") +set(_cmake_targets_not_defined "") +set(_cmake_expected_targets "") +foreach(_cmake_expected_target IN ITEMS Vulkan::Headers) + list(APPEND _cmake_expected_targets "${_cmake_expected_target}") + if(TARGET "${_cmake_expected_target}") + list(APPEND _cmake_targets_defined "${_cmake_expected_target}") + else() + list(APPEND _cmake_targets_not_defined "${_cmake_expected_target}") + endif() +endforeach() +unset(_cmake_expected_target) +if(_cmake_targets_defined STREQUAL _cmake_expected_targets) + unset(_cmake_targets_defined) + unset(_cmake_targets_not_defined) + unset(_cmake_expected_targets) + unset(CMAKE_IMPORT_FILE_VERSION) + cmake_policy(POP) + return() +endif() +if(NOT _cmake_targets_defined STREQUAL "") + string(REPLACE ";" ", " _cmake_targets_defined_text "${_cmake_targets_defined}") + string(REPLACE ";" ", " _cmake_targets_not_defined_text "${_cmake_targets_not_defined}") + message(FATAL_ERROR "Some (but not all) targets in this export set were already defined.\nTargets Defined: ${_cmake_targets_defined_text}\nTargets not yet defined: ${_cmake_targets_not_defined_text}\n") +endif() +unset(_cmake_targets_defined) +unset(_cmake_targets_not_defined) +unset(_cmake_expected_targets) + + +# Compute the installation prefix relative to this file. 
+get_filename_component(_IMPORT_PREFIX "${CMAKE_CURRENT_LIST_FILE}" PATH) +get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH) +get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH) +get_filename_component(_IMPORT_PREFIX "${_IMPORT_PREFIX}" PATH) +if(_IMPORT_PREFIX STREQUAL "/") + set(_IMPORT_PREFIX "") +endif() + +# Create imported target Vulkan::Headers +add_library(Vulkan::Headers INTERFACE IMPORTED) + +set_target_properties(Vulkan::Headers PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${_IMPORT_PREFIX}/include" +) + +# Load information for each installed configuration. +file(GLOB _cmake_config_files "${CMAKE_CURRENT_LIST_DIR}/VulkanHeadersConfig-*.cmake") +foreach(_cmake_config_file IN LISTS _cmake_config_files) + include("${_cmake_config_file}") +endforeach() +unset(_cmake_config_file) +unset(_cmake_config_files) + +# Cleanup temporary variables. +set(_IMPORT_PREFIX) + +# Loop over all imported files and verify that they actually exist +foreach(_cmake_target IN LISTS _cmake_import_check_targets) + if(CMAKE_VERSION VERSION_LESS "3.28" + OR NOT DEFINED _cmake_import_check_xcframework_for_${_cmake_target} + OR NOT IS_DIRECTORY "${_cmake_import_check_xcframework_for_${_cmake_target}}") + foreach(_cmake_file IN LISTS "_cmake_import_check_files_for_${_cmake_target}") + if(NOT EXISTS "${_cmake_file}") + message(FATAL_ERROR "The imported target \"${_cmake_target}\" references the file + \"${_cmake_file}\" +but this file does not exist. Possible reasons include: +* The file was deleted, renamed, or moved to another location. +* An install or uninstall procedure did not complete successfully. +* The installation package was faulty and contained + \"${CMAKE_CURRENT_LIST_FILE}\" +but not all the files it references. +") + endif() + endforeach() + endif() + unset(_cmake_file) + unset("_cmake_import_check_files_for_${_cmake_target}") +endforeach() +unset(_cmake_target) +unset(_cmake_import_check_targets) + +# This file does not depend on other imported targets which have +# been exported from the same project but in a separate export set. + +# Commands beyond this point should not need to know the version. +set(CMAKE_IMPORT_FILE_VERSION) +cmake_policy(POP) diff --git a/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfigVersion.cmake b/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfigVersion.cmake new file mode 100644 index 000000000000..a6fdead99c3e --- /dev/null +++ b/dependencies/vulkan/share/cmake/VulkanHeaders/VulkanHeadersConfigVersion.cmake @@ -0,0 +1,54 @@ +# This is a basic version file for the Config-mode of find_package(). +# It is used by write_basic_package_version_file() as input file for configure_file() +# to create a version-file which can be installed along a config.cmake file. +# +# The created file sets PACKAGE_VERSION_EXACT if the current version string and +# the requested version string are exactly the same and it sets +# PACKAGE_VERSION_COMPATIBLE if the current version is >= requested version, +# but only if the requested major version is the same as the current one. +# The variable CVF_VERSION must be set before calling configure_file(). 
+ + +set(PACKAGE_VERSION "1.3.296") + +if(PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION) + set(PACKAGE_VERSION_COMPATIBLE FALSE) +else() + + if("1.3.296" MATCHES "^([0-9]+)\\.") + set(CVF_VERSION_MAJOR "${CMAKE_MATCH_1}") + if(NOT CVF_VERSION_MAJOR VERSION_EQUAL 0) + string(REGEX REPLACE "^0+" "" CVF_VERSION_MAJOR "${CVF_VERSION_MAJOR}") + endif() + else() + set(CVF_VERSION_MAJOR "1.3.296") + endif() + + if(PACKAGE_FIND_VERSION_RANGE) + # both endpoints of the range must have the expected major version + math (EXPR CVF_VERSION_MAJOR_NEXT "${CVF_VERSION_MAJOR} + 1") + if (NOT PACKAGE_FIND_VERSION_MIN_MAJOR STREQUAL CVF_VERSION_MAJOR + OR ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND NOT PACKAGE_FIND_VERSION_MAX_MAJOR STREQUAL CVF_VERSION_MAJOR) + OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND NOT PACKAGE_FIND_VERSION_MAX VERSION_LESS_EQUAL CVF_VERSION_MAJOR_NEXT))) + set(PACKAGE_VERSION_COMPATIBLE FALSE) + elseif(PACKAGE_FIND_VERSION_MIN_MAJOR STREQUAL CVF_VERSION_MAJOR + AND ((PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "INCLUDE" AND PACKAGE_VERSION VERSION_LESS_EQUAL PACKAGE_FIND_VERSION_MAX) + OR (PACKAGE_FIND_VERSION_RANGE_MAX STREQUAL "EXCLUDE" AND PACKAGE_VERSION VERSION_LESS PACKAGE_FIND_VERSION_MAX))) + set(PACKAGE_VERSION_COMPATIBLE TRUE) + else() + set(PACKAGE_VERSION_COMPATIBLE FALSE) + endif() + else() + if(PACKAGE_FIND_VERSION_MAJOR STREQUAL CVF_VERSION_MAJOR) + set(PACKAGE_VERSION_COMPATIBLE TRUE) + else() + set(PACKAGE_VERSION_COMPATIBLE FALSE) + endif() + + if(PACKAGE_FIND_VERSION STREQUAL PACKAGE_VERSION) + set(PACKAGE_VERSION_EXACT TRUE) + endif() + endif() +endif() + + diff --git a/doc/Vulkan.md b/doc/Vulkan.md index fa626afc37d2..d1cb6c41aaff 100644 --- a/doc/Vulkan.md +++ b/doc/Vulkan.md @@ -75,7 +75,7 @@ $ sudo add-apt-repository ppa:graphics-drivers/ppa $ sudo apt update $ sudo apt upgrade # - replace ### with latest driver release (e.g. 515) -$ sudo apt install nvidia-driver-### nvidia-settings vulkan vulkan-tools +$ sudo apt install nvidia-driver-### nvidia-settings libvulkan1 vulkan-tools ``` Note that only valid drivers for your system should be installed since there are @@ -237,6 +237,26 @@ validated and disassembled via the SPIR-V tools: https://github.com/KhronosGroup/SPIRV-Tools +In addition to the SPIR-V Tools, you may also wish to install the Khronos Validation +Layers which provide an exhaustive suite of runtime checks that can be injected +by adding `VK_LAYER_KHRONOS_validation` to the `VK_INSTANCE_LAYERS=` environment +variable. + +To install the validation layers and the SPIR-V tools on Ubuntu v22.04: + +``` +$ sudo apt install vulkan-validationlayers vulkan-validationlayers-dev spirv-tools +``` + +To test the validation layer, you can prepend your shell command for any Vulkan +enabled binary with the appropriate environment settings. For example, +you can run one of the JIT-enabled correctness tests w/debug output and validation +layers enabled like so: + +``` +$ VK_INSTANCE_LAYERS=VK_LAYER_KHRONOS_validation HL_JIT_TARGET=host-vulkan-vk_int8-vk_int16-vk_int64-vk_float16-vk_float64-vk_v13-debug ./build/test/correctness/correctness_hello_gpu +``` + # Current Status All correctness tests are now passing on tested configs for Linux & Windows using the target `host-vulkan-vk_int8-vk_int16-vk_int64-vk_float16-vk_float64-vk_v13` on LLVM v14.x. 
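As a companion to the doc/Vulkan.md instructions above, here is a minimal, illustrative sketch (not part of this patch) of selecting the Vulkan backend programmatically for a JIT pipeline instead of via `HL_JIT_TARGET`. It assumes the standard Halide front-end API and a working Vulkan loader/driver; the pipeline, names, and tile sizes are arbitrary, and `Target::VulkanV13` corresponds to the `vk_v13` flag used in the target strings above.

```cpp
#include "Halide.h"
#include <cstdio>

using namespace Halide;

int main() {
    // Trivial pipeline, scheduled onto the GPU so the Vulkan device API is exercised.
    Var x("x"), y("y"), xo("xo"), yo("yo"), xi("xi"), yi("yi");
    Func f("f");
    f(x, y) = x + y;
    f.gpu_tile(x, y, xo, yo, xi, yi, 8, 8);

    // Roughly equivalent to setting HL_JIT_TARGET=host-vulkan-vk_v13 for this process.
    Target t = get_host_target()
                   .with_feature(Target::Vulkan)
                   .with_feature(Target::VulkanV13);

    Buffer<int> out = f.realize({256, 256}, t);
    out.copy_to_host();  // results live on the device until copied back
    printf("out(10, 10) = %d\n", out(10, 10));
    return 0;
}
```

The validation layers described above apply to this kind of run as well: exporting `VK_INSTANCE_LAYERS=VK_LAYER_KHRONOS_validation` before launching the binary enables the same runtime checks.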
diff --git a/pyproject.toml b/pyproject.toml
index b20a83567cfc..ade383fc5985 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -103,7 +103,7 @@ cmake.define.Halide_WASM_BACKEND = "OFF"
 github_url = "https://github.com/halide/Halide/"
 
 [tool.tbump.version]
-current = "19.0.0"
+current = "20.0.0"
 regex = '(?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)'
 
 [tool.tbump.git]
diff --git a/python_bindings/CMakeLists.txt b/python_bindings/CMakeLists.txt
index 2d3a7ac77da9..189d82e0405e 100644
--- a/python_bindings/CMakeLists.txt
+++ b/python_bindings/CMakeLists.txt
@@ -1,5 +1,5 @@
 cmake_minimum_required(VERSION 3.28)
-project(Halide_Python VERSION 19.0.0)
+project(Halide_Python VERSION 20.0.0)
 
 if (PROJECT_IS_TOP_LEVEL)
     enable_testing()
diff --git a/src/Bounds.cpp b/src/Bounds.cpp
index d080b291b50f..841a40639f3e 100644
--- a/src/Bounds.cpp
+++ b/src/Bounds.cpp
@@ -1857,20 +1857,6 @@ Interval bounds_of_expr_in_scope_with_indent(const Expr &expr,
                                              const Scope<Interval> &scope,
                                              const FuncValueBounds &fb,
                                              bool const_bound) {
diff --git a/src/CodeGen_Vulkan_Dev.cpp b/src/CodeGen_Vulkan_Dev.cpp
index 559f0bb932f4..fea2957c9498 100644
--- a/src/CodeGen_Vulkan_Dev.cpp
+++ b/src/CodeGen_Vulkan_Dev.cpp
@@ -1,5 +1,6 @@
 #include 
-#include <fstream>  // for dump to file
+#include <filesystem>  // for dump to file
+#include <fstream>     // for dump to file
 #include 
 #include 
@@ -114,9 +115,9 @@ class CodeGen_Vulkan_Dev : public CodeGen_GPU_Dev {
     // Top-level function for adding kernels
     void add_kernel(const Stmt &s, const std::string &name, const std::vector<DeviceArgument> &args);
 
-    void init_module();
-    void compile(std::vector<char> &binary);
-    void dump() const;
+    void init_spirv_module();
+    void encode_spirv_module(std::vector<char> &binary);
+    void dump_spirv_module() const;
 
     // Encode the descriptor sets into a sidecar which will be added
     // as a header to the module prior to the actual SPIR-V binary
@@ -282,10 +283,32 @@ class CodeGen_Vulkan_Dev : public CodeGen_GPU_Dev {
         // Target for codegen
         Target target;
+    };
-    } emitter;
+    // Current target for codegen
+    Target current_target;
+    // Current kernel name for CodeGen
     std::string current_kernel_name;
+
+    // In order to avoid using up all descriptor sets for complicated pipelines,
+    // we will encode each Kernel entry-point into its own SPIR-V module and
+    // bind them as separate shaders to avoid running out of resources on
+    // constrained devices.
+    struct KernelModule {
+        std::string kernel_name;
+        std::vector<char> spirv_module;  // header + binary
+    };
+    using KernelModuleTable = std::vector<KernelModule>;
+    KernelModuleTable kernel_module_table;
+
+    // merge the contents of the kernel module table into a single binary
+    // containing a header followed by the SPIR-V modules for each kernel
+    void encode_module(std::vector<char> &module);
+
+    // dump the contents of the combined module, outputting separate
+    // SPIR-V binary files for each kernel
+    void dump_module(const std::vector<char> &module);
 };
 
 // Check if all loads and stores to the member 'buffer' are dense, aligned, and
@@ -2148,23 +2171,23 @@ void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Shuffle *op) {
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const VectorReduce *) {
-    internal_error << "CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const VectorReduce *): VectorReduce not implemented for codegen\n";
+    user_assert(false) << "VectorReduce is not supported inside Vulkan kernels";
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Prefetch *) {
-    internal_error << "CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Prefetch *): Prefetch not implemented for codegen\n";
+    user_assert(false) << "Prefetch is not supported inside Vulkan kernels";
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Fork *) {
-    internal_error << "void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Fork *): Fork not implemented for codegen";
+    user_assert(false) << "Fork is not supported inside Vulkan kernels";
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Acquire *) {
-    internal_error << "void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Acquire *): Acquire not implemented for codegen";
+    user_assert(false) << "Acquire is not supported inside Vulkan kernels";
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Atomic *) {
-    internal_error << "void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit(const Atomic *): Atomic not implemented for codegen";
+    user_assert(false) << "Atomic updates are not supported inside Vulkan kernels";
 }
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::visit_unary_op(SpvOp op_code, Type t, const Expr &a) {
@@ -2243,7 +2266,7 @@ void CodeGen_Vulkan_Dev::SPIRV_Emitter::reset() {
     reset_workgroup_size();
 }
 
-void CodeGen_Vulkan_Dev::SPIRV_Emitter::init_module() {
+void CodeGen_Vulkan_Dev::SPIRV_Emitter::init_spirv_module() {
     reset();
 
     if (target.has_feature(Target::VulkanV13)) {
@@ -2286,6 +2309,11 @@ std::vector<char> encode_header_string(const std::string &str) {
 
 void CodeGen_Vulkan_Dev::SPIRV_Emitter::encode_header(SpvBinary &spirv_header) {
     debug(2) << "CodeGen_Vulkan_Dev::SPIRV_Emitter::encode_header\n";
+    // NOTE: This header is a preamble to the actual SPIR-V module that's passed
+    // to the driver. It describes the descriptor sets and other metadata
+    // that's necessary to bind the compiled shader. This is combined with
+    // the actual SPIR-V module to form a "kernel module".
+    //
     // Encode a sidecar for the module that lists the descriptor sets
     // corresponding to each entry point contained in the module.
     //
@@ -2723,8 +2751,8 @@ void CodeGen_Vulkan_Dev::SPIRV_Emitter::declare_device_args(const Stmt &s, uint3
     descriptor_set_table.push_back(descriptor_set);
 }
 
-void CodeGen_Vulkan_Dev::SPIRV_Emitter::compile(std::vector<char> &module) {
-    debug(2) << "CodeGen_Vulkan_Dev::SPIRV_Emitter::compile\n";
+void CodeGen_Vulkan_Dev::SPIRV_Emitter::encode_spirv_module(std::vector<char> &module) {
+    debug(2) << "CodeGen_Vulkan_Dev::SPIRV_Emitter::encode_spirv_module\n";
 
     // First encode the descriptor set bindings for each entry point
     // as a sidecar which we will add as a preamble header to the actual
@@ -2825,19 +2853,19 @@ void CodeGen_Vulkan_Dev::SPIRV_Emitter::add_kernel(const Stmt &s,
     storage_access_map.clear();
 }
 
-void CodeGen_Vulkan_Dev::SPIRV_Emitter::dump() const {
-    debug(2) << "CodeGen_Vulkan_Dev::SPIRV_Emitter::dump()\n";
+void CodeGen_Vulkan_Dev::SPIRV_Emitter::dump_spirv_module() const {
+    debug(2) << "CodeGen_Vulkan_Dev::SPIRV_Emitter::dump_spirv_module()\n";
     std::cerr << builder.current_module();
 }
 
 CodeGen_Vulkan_Dev::CodeGen_Vulkan_Dev(Target t)
-    : emitter(t) {
+    : current_target(t) {
     // Empty
 }
 
 void CodeGen_Vulkan_Dev::init_module() {
     debug(2) << "CodeGen_Vulkan_Dev::init_module\n";
-    emitter.init_module();
+    kernel_module_table.clear();
 }
 
 void CodeGen_Vulkan_Dev::add_kernel(Stmt stmt,
@@ -2853,21 +2881,175 @@
            << stmt;
 
     current_kernel_name = name;
+
+    // Create the SPIR-V emitter
+    SPIRV_Emitter emitter(current_target);
+    emitter.init_spirv_module();
     emitter.add_kernel(stmt, name, args);
 
-    // dump the SPIRV file if requested
-    if (getenv("HL_SPIRV_DUMP_FILE")) {
-        dump();
+    // Encode the SPIR-V module (header + binary)
+    std::vector<char> spirv_module;
+    emitter.encode_spirv_module(spirv_module);
+
+    // Dump the SPIR-V if debug is enabled
+    if (debug::debug_level() >= 2) {
+        emitter.dump_spirv_module();
     }
+
+    // Copy the SPIR-V module into the Kernel Module table
+    KernelModule kernel_module;
+    kernel_module.kernel_name = name;
+    kernel_module.spirv_module = spirv_module;
+    kernel_module_table.push_back(kernel_module);
 }
 
 std::vector<char> CodeGen_Vulkan_Dev::compile_to_src() {
     debug(2) << "CodeGen_Vulkan_Dev::compile_to_src\n";
+
     std::vector<char> module;
-    emitter.compile(module);
+    encode_module(module);
+
+    // dump the SPIRV file if requested
+    if (getenv("HL_SPIRV_DUMP_FILE")) {
+        dump_module(module);
+    }
+
     return module;
 }
 
+void CodeGen_Vulkan_Dev::encode_module(std::vector<char> &module) {
+
+    debug(2) << "CodeGen_Vulkan_Dev::encode_module\n";
+
+    // NOTE: The module generated by this method is an amalgamation of all the
+    // "kernel modules". It consists of a simple header indicating the
+    // number of "kernel modules" and their respective sizes, followed by
+    // a "SPIR-V module" for each kernel. Each "SPIR-V module" contains a
+    // preamble header describing the descriptor sets and metadata necessary
+    // for binding the shader, followed by the actual SPIR-V binary that's
+    // submitted to the driver.
+    //
+    // Previously, we encoded everything into a single "SPIR-V module" with
+    // entry-points for each kernel in the pipeline, but for complex pipelines,
+    // this could easily exceed the number of descriptor sets available
+    // at runtime on resource-constrained devices. As a compromise, we now
+    // generate a single SPIR-V module per entry-point to reduce the number
+    // of descriptor sets needed to bind each shader. However, this
+    // requires us to merge them into an amalgamation, which is why this two-level
+    // encoding exists.
+ // + // + // [0] Number of Kernel Modules (uint32_t) + // ... For each kernel module ... + // ... [0] Byte count indicating size of "Kernel Module" entry (uint32_t) + // ... [1] Length of entry point name (padded to nearest word size) + // ... [*] Entry point string data (padded with null chars) + // [1] Kernel Module Table + // ... For each kernel module ... + // ....[0] The SPIR-V Module for the kernel + // ....... (see encode_spirv_module() for details) + // + // NOTE: Halide's Vulkan runtime consumes this module prior to compiling. + // + // Both vk_decode_shader_bindings() and vk_compile_shader_module() will + // need to be updated if this encoding ever changes! + // + + // Nothing to do if table is empty + if (kernel_module_table.empty()) { + return; + } + + // Encode a module header consisting of the number of kernels, followed by the binary size of each + size_t binary_bytes = 0; + SpvBinary module_header; + uint32_t kernel_count = (uint32_t)kernel_module_table.size(); + debug(1) << " kernel_count = " << kernel_count << "\n"; + + // [0] Number of Kernel Modules (uint32_t) + module_header.push_back(kernel_count); + + // For each kernel module ... + uint32_t n = 0; + for (const KernelModule &kernel_module : kernel_module_table) { + + // [0] Byte count indicating size of "Kernel Module" entry (uint32_t) + uint32_t spirv_module_size = (uint32_t)kernel_module.spirv_module.size(); + debug(1) << " spirv_module_size[" << n++ << "] = " << spirv_module_size << " bytes\n"; + module_header.push_back(spirv_module_size); + binary_bytes += spirv_module_size; + + // encode the kernel name into an array of chars (padded to the next word entry) + std::vector kernel_name = encode_header_string(kernel_module.kernel_name); + uint32_t kernel_name_entries = (uint32_t)(kernel_name.size() / sizeof(uint32_t)); + + // [1] Length of entry point name (padded to nearest word size) + module_header.push_back(kernel_name_entries); + + // [*] Entry point string data (padded with null chars) + module_header.insert(module_header.end(), (const uint32_t *)kernel_name.data(), (const uint32_t *)(kernel_name.data() + kernel_name.size())); + } + + size_t header_bytes = module_header.size() * sizeof(uint32_t); + + debug(2) << " encoding module (" + << "header_size: " << (uint32_t)(header_bytes) << ", " + << "binary_size: " << (uint32_t)(binary_bytes) << ")\n"; + + // Combine the header and each kernel module into the binary we will be returning + module.reserve(header_bytes + binary_bytes); + module.insert(module.end(), (const char *)module_header.data(), (const char *)(module_header.data() + module_header.size())); + for (const KernelModule &kernel_module : kernel_module_table) { + module.insert(module.end(), (const char *)kernel_module.spirv_module.data(), (const char *)(kernel_module.spirv_module.data() + kernel_module.spirv_module.size())); + } +} + +void CodeGen_Vulkan_Dev::dump_module(const std::vector &module) { + // Get the dump file name from the env (default to out.spv if unspecified) + const char *dump_file = getenv("HL_SPIRV_DUMP_FILE") ? 
getenv("HL_SPIRV_DUMP_FILE") : "out.spv"; + std::filesystem::path dump_file_path(dump_file); + + // Determine the number of kernels and their binary sizes + uint32_t word_offset = 0; + const uint32_t *module_header = (const uint32_t *)(module.data()); + uint32_t kernel_count = module_header[word_offset++]; + std::vector binary_sizes; + std::vector kernel_names; + for (uint32_t i = 0; (i < kernel_count) && ((word_offset * sizeof(uint32_t)) < module.size()); ++i) { + binary_sizes.push_back(module_header[word_offset++]); + + uint32_t kernel_name_entry_size = module_header[word_offset++]; + kernel_names.push_back((const char *)(module_header + word_offset)); + + // Skip past the kernel name + word_offset += kernel_name_entry_size; + } + + // Dump the SPIR-V binary for each kernel as a separate file + size_t byte_offset = word_offset * sizeof(uint32_t); + for (uint32_t i = 0; (i < kernel_count) && (byte_offset < module.size()); ++i) { + + // Skip the header and only output the SPIR-V binary for the kernel + const uint32_t *decode = (const uint32_t *)(module.data() + byte_offset); + uint32_t header_word_count = decode[0]; + size_t header_size = header_word_count * sizeof(uint32_t); + const uint32_t *spirv_ptr = (decode + header_word_count); + size_t spirv_size = (binary_sizes[i] - header_size); + + // Add the kernel index to the dump filename + std::string dump_kernel_name = std::string(kernel_names[i]); + std::string dump_kernel_file = dump_file_path.stem().string(); + dump_kernel_file += "_k" + std::to_string(i) + dump_kernel_name + dump_file_path.extension().string(); + + debug(1) << "Vulkan: Dumping SPIRV module to file: '" << dump_kernel_file << "'\n"; + std::ofstream f(dump_kernel_file.c_str(), std::ios::out | std::ios::binary); + f.write((const char *)(spirv_ptr), spirv_size); + f.close(); + + byte_offset += binary_sizes[i]; + } +} + std::string CodeGen_Vulkan_Dev::get_current_kernel_name() { return current_kernel_name; } @@ -2877,23 +3059,9 @@ std::string CodeGen_Vulkan_Dev::print_gpu_name(const std::string &name) { } void CodeGen_Vulkan_Dev::dump() { - std::vector module = compile_to_src(); - - // Print the contents of the compiled SPIR-V module - emitter.dump(); - - // Skip the header and only output the SPIR-V binary - const uint32_t *decode = (const uint32_t *)(module.data()); - uint32_t header_word_count = decode[0]; - size_t header_size = header_word_count * sizeof(uint32_t); - const uint32_t *binary_ptr = (decode + header_word_count); - size_t binary_size = (module.size() - header_size); - - const char *filename = getenv("HL_SPIRV_DUMP_FILE") ? 
getenv("HL_SPIRV_DUMP_FILE") : "out.spv"; - debug(1) << "Vulkan: Dumping SPIRV module to file: '" << filename << "'\n"; - std::ofstream f(filename, std::ios::out | std::ios::binary); - f.write((const char *)(binary_ptr), binary_size); - f.close(); + std::vector module; + encode_module(module); + dump_module(module); } } // namespace @@ -2917,4 +3085,4 @@ std::unique_ptr new_CodeGen_Vulkan_Dev(const Target &target) { } // namespace Internal } // namespace Halide -#endif // WITH_SPIRV +#endif // WITH_SPIRV \ No newline at end of file diff --git a/src/ConstantBounds.cpp b/src/ConstantBounds.cpp index 11d1a42133a9..164678e2554c 100644 --- a/src/ConstantBounds.cpp +++ b/src/ConstantBounds.cpp @@ -73,7 +73,6 @@ ConstantInterval bounds_helper(const Expr &e, ScopedBinding bind(scope, op->name, recurse(op->value)); return recurse(op->body); } else if (const Call *op = e.as()) { - ConstantInterval result; if (op->is_intrinsic(Call::abs)) { return abs(recurse(op->args[0])); } else if (op->is_intrinsic(Call::absd)) { diff --git a/src/ConstantInterval.cpp b/src/ConstantInterval.cpp index 8bdeec69325c..e65b709efe46 100644 --- a/src/ConstantInterval.cpp +++ b/src/ConstantInterval.cpp @@ -7,8 +7,6 @@ namespace Halide { namespace Internal { -ConstantInterval::ConstantInterval() = default; - ConstantInterval::ConstantInterval(int64_t min, int64_t max) : min(min), max(max), min_defined(true), max_defined(true) { internal_assert(min <= max); diff --git a/src/ConstantInterval.h b/src/ConstantInterval.h index daa6f0f4dbe0..cc3e6893b3b1 100644 --- a/src/ConstantInterval.h +++ b/src/ConstantInterval.h @@ -22,7 +22,7 @@ struct ConstantInterval { bool min_defined = false, max_defined = false; /* A default-constructed Interval is everything */ - ConstantInterval(); + ConstantInterval() = default; /** Construct an interval from a lower and upper bound. */ ConstantInterval(int64_t min, int64_t max); diff --git a/src/FindIntrinsics.cpp b/src/FindIntrinsics.cpp index f188d8fdd89f..9f1c702d4c6a 100644 --- a/src/FindIntrinsics.cpp +++ b/src/FindIntrinsics.cpp @@ -516,8 +516,12 @@ class FindIntrinsics : public IRMutator { return IRMutator::visit(op); } - Expr value = mutate(op->value); + return visit_cast(op, mutate(op->value)); + } + // Isolated in its own function to keep the (large) stack frame off the + // recursive path. + HALIDE_NEVER_INLINE Expr visit_cast(const Cast *op, Expr &&value) { // This mutator can generate redundant casts. We can't use the simplifier because it // undoes some of the intrinsic lowering here, and it causes some problems due to // factoring (instead of distributing) constants. @@ -550,6 +554,7 @@ class FindIntrinsics : public IRMutator { auto is_x_same_uint = op->type.is_uint() && is_uint(x, bits); auto is_x_same_int_or_uint = is_x_same_int || is_x_same_uint; auto x_y_same_sign = (is_int(x) && is_int(y)) || (is_uint(x) && is_uint(y)); + if ( // Saturating patterns rewrite(max(min(widening_add(x, y), upper), lower), @@ -566,11 +571,11 @@ class FindIntrinsics : public IRMutator { rewrite(min(widening_add(x, y), upper), saturating_add(x, y), - op->type.is_uint() && is_x_same_uint) || + is_x_same_uint) || rewrite(max(widening_sub(x, y), lower), saturating_sub(x, y), - op->type.is_uint() && is_x_same_uint) || + is_x_same_uint) || // Saturating narrow patterns. 
rewrite(max(min(x, upper), lower), @@ -721,10 +726,17 @@ class FindIntrinsics : public IRMutator { op = mutated.as(); if (!op) { return mutated; + } else { + return visit_call(op); } + } + // Isolated in its own function to keep the (large) stack frame off the + // recursive path. The Call node has already been mutated by the base class + // visitor. + HALIDE_NEVER_INLINE Expr visit_call(const Call *op) { auto rewrite = IRMatcher::rewriter(op, op->type); - if (rewrite(intrin(Call::abs, widening_sub(x, y)), cast(op->type, intrin(Call::absd, x, y))) || + if (rewrite(abs(widening_sub(x, y)), cast(op->type, absd(x, y))) || false) { return rewrite.result; } diff --git a/src/HexagonOptimize.cpp b/src/HexagonOptimize.cpp index 35a1ef780fff..92f3965e1600 100644 --- a/src/HexagonOptimize.cpp +++ b/src/HexagonOptimize.cpp @@ -720,64 +720,69 @@ class OptimizePatterns : public IRMutator { if (mpyadd.defined()) { return mpyadd; } - static const vector adds = { - // Use accumulating versions of vmpa, vdmpy, vrmpy instructions when possible. - {"halide.hexagon.acc_add_2mpy.vh.vub.vub.b.b", wild_i16x + halide_hexagon_add_2mpy(Int(16, 0), ".vub.vub.b.b", wild_u8x, wild_u8x, wild_i8, wild_i8), Pattern::ReinterleaveOp0}, - {"halide.hexagon.acc_add_2mpy.vw.vh.vh.b.b", wild_i32x + halide_hexagon_add_2mpy(Int(32, 0), ".vh.vh.b.b", wild_i16x, wild_i16x, wild_i8, wild_i8), Pattern::ReinterleaveOp0}, - {"halide.hexagon.acc_add_2mpy.vh.vub.b", wild_i16x + halide_hexagon_add_2mpy(Int(16, 0), ".vub.b", wild_u8x, wild_i32)}, - {"halide.hexagon.acc_add_2mpy.vw.vh.b", wild_i32x + halide_hexagon_add_2mpy(Int(32, 0), ".vh.b", wild_i16x, wild_i32)}, - {"halide.hexagon.acc_add_3mpy.vh.vub.b", wild_i16x + native_interleave(halide_hexagon_add_3mpy(Int(16, 0), ".vub.b", wild_u8x, wild_i32)), Pattern::ReinterleaveOp0}, - {"halide.hexagon.acc_add_3mpy.vh.vb.b", wild_i16x + native_interleave(halide_hexagon_add_3mpy(Int(16, 0), ".vb.b", wild_i8x, wild_i32)), Pattern::ReinterleaveOp0}, - {"halide.hexagon.acc_add_3mpy.vw.vh.b", wild_i32x + native_interleave(halide_hexagon_add_3mpy(Int(32, 0), ".vh.b", wild_i16x, wild_i32)), Pattern::ReinterleaveOp0}, - {"halide.hexagon.acc_add_4mpy.vw.vub.b", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.b", wild_u8x, wild_i32)}, - {"halide.hexagon.acc_add_4mpy.vuw.vub.ub", wild_u32x + halide_hexagon_add_4mpy(UInt(32, 0), ".vub.ub", wild_u8x, wild_u32)}, - {"halide.hexagon.acc_add_4mpy.vuw.vub.ub", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.ub", wild_u8x, wild_u32)}, - {"halide.hexagon.acc_add_4mpy.vuw.vub.vub", wild_u32x + halide_hexagon_add_4mpy(UInt(32, 0), ".vub.vub", wild_u8x, wild_u8x)}, - {"halide.hexagon.acc_add_4mpy.vuw.vub.vub", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.vub", wild_u8x, wild_u8x)}, - {"halide.hexagon.acc_add_4mpy.vw.vub.vb", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.vb", wild_u8x, wild_i8x)}, - {"halide.hexagon.acc_add_4mpy.vw.vb.vb", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vb.vb", wild_i8x, wild_i8x)}, - - // Widening multiply-accumulates with a scalar. - {"halide.hexagon.add_mpy.vuh.vub.ub", wild_u16x + widening_mul(wild_u8x, wild_u8), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vh.vub.b", wild_i16x + widening_mul(wild_u8x, wild_i8), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vuw.vuh.uh", wild_u32x + widening_mul(wild_u16x, wild_u16), Pattern::ReinterleaveOp0}, - - // These patterns aren't exactly right because the instruction - // saturates the result. 
However, this is really the instruction - // that we want to use in most cases, and we can exploit the fact - // that 32 bit signed arithmetic overflow is undefined to argue - // that these patterns are not completely incorrect. - {"halide.hexagon.satw_add_mpy.vw.vh.h", wild_i32x + widening_mul(wild_i16x, wild_i16), Pattern::ReinterleaveOp0}, - - // Widening multiply-accumulates. - {"halide.hexagon.add_mpy.vuh.vub.vub", wild_u16x + widening_mul(wild_u8x, wild_u8x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vuw.vuh.vuh", wild_u32x + widening_mul(wild_u16x, wild_u16x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vh.vb.vb", wild_i16x + widening_mul(wild_i8x, wild_i8x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vw.vh.vh", wild_i32x + widening_mul(wild_i16x, wild_i16x), Pattern::ReinterleaveOp0}, - - {"halide.hexagon.add_mpy.vh.vub.vb", wild_i16x + widening_mul(wild_u8x, wild_i8x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vw.vh.vuh", wild_i32x + widening_mul(wild_i16x, wild_u16x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.add_mpy.vh.vub.vb", wild_i16x + widening_mul(wild_i8x, wild_u8x), Pattern::ReinterleaveOp0 | Pattern::SwapOps12}, - {"halide.hexagon.add_mpy.vw.vh.vuh", wild_i32x + widening_mul(wild_u16x, wild_i16x), Pattern::ReinterleaveOp0 | Pattern::SwapOps12}, - - // Shift-accumulates. - {"halide.hexagon.add_shr.vw.vw.uw", wild_i32x + (wild_i32x >> wild_u32)}, - {"halide.hexagon.add_shl.vw.vw.uw", wild_i32x + (wild_i32x << wild_u32)}, - {"halide.hexagon.add_shl.vw.vw.uw", wild_u32x + (wild_u32x << wild_u32)}, - {"halide.hexagon.add_shl.vh.vh.uh", wild_i16x + (wild_i16x << wild_u16), Pattern::v65orLater}, - {"halide.hexagon.add_shl.vh.vh.uh", wild_u16x + (wild_u16x << wild_u16), Pattern::v65orLater}, - {"halide.hexagon.add_shr.vh.vh.uh", wild_i16x + (wild_i16x >> wild_u16), Pattern::v65orLater}, - {"halide.hexagon.add_shl.vh.vh.uh", wild_i16x + (wild_i16x << wild_i16), Pattern::v65orLater}, - {"halide.hexagon.add_shl.vh.vh.uh", wild_u16x + (wild_u16x << wild_u16), Pattern::v65orLater}, - - // Non-widening multiply-accumulates with a scalar. - {"halide.hexagon.add_mul.vh.vh.b", wild_i16x + widen_right_mul(wild_i16x, wild_i8)}, - {"halide.hexagon.add_mul.vw.vw.h", wild_i32x + widen_right_mul(wild_i32x, wild_i16)}, - // TODO: There's also a add_mul.vw.vw.b - - // This pattern is very general, so it must come last. - {"halide.hexagon.add_mul.vh.vh.vh", wild_i16x + wild_i16x * wild_i16x}, - }; + + static const vector adds = ([&]() HALIDE_NEVER_INLINE { + // Initialized in a non-inlined lambda to keep the large stack frame + // off the recursive path. + return vector{ + // Use accumulating versions of vmpa, vdmpy, vrmpy instructions when possible. 
+ {"halide.hexagon.acc_add_2mpy.vh.vub.vub.b.b", wild_i16x + halide_hexagon_add_2mpy(Int(16, 0), ".vub.vub.b.b", wild_u8x, wild_u8x, wild_i8, wild_i8), Pattern::ReinterleaveOp0}, + {"halide.hexagon.acc_add_2mpy.vw.vh.vh.b.b", wild_i32x + halide_hexagon_add_2mpy(Int(32, 0), ".vh.vh.b.b", wild_i16x, wild_i16x, wild_i8, wild_i8), Pattern::ReinterleaveOp0}, + {"halide.hexagon.acc_add_2mpy.vh.vub.b", wild_i16x + halide_hexagon_add_2mpy(Int(16, 0), ".vub.b", wild_u8x, wild_i32)}, + {"halide.hexagon.acc_add_2mpy.vw.vh.b", wild_i32x + halide_hexagon_add_2mpy(Int(32, 0), ".vh.b", wild_i16x, wild_i32)}, + {"halide.hexagon.acc_add_3mpy.vh.vub.b", wild_i16x + native_interleave(halide_hexagon_add_3mpy(Int(16, 0), ".vub.b", wild_u8x, wild_i32)), Pattern::ReinterleaveOp0}, + {"halide.hexagon.acc_add_3mpy.vh.vb.b", wild_i16x + native_interleave(halide_hexagon_add_3mpy(Int(16, 0), ".vb.b", wild_i8x, wild_i32)), Pattern::ReinterleaveOp0}, + {"halide.hexagon.acc_add_3mpy.vw.vh.b", wild_i32x + native_interleave(halide_hexagon_add_3mpy(Int(32, 0), ".vh.b", wild_i16x, wild_i32)), Pattern::ReinterleaveOp0}, + {"halide.hexagon.acc_add_4mpy.vw.vub.b", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.b", wild_u8x, wild_i32)}, + {"halide.hexagon.acc_add_4mpy.vuw.vub.ub", wild_u32x + halide_hexagon_add_4mpy(UInt(32, 0), ".vub.ub", wild_u8x, wild_u32)}, + {"halide.hexagon.acc_add_4mpy.vuw.vub.ub", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.ub", wild_u8x, wild_u32)}, + {"halide.hexagon.acc_add_4mpy.vuw.vub.vub", wild_u32x + halide_hexagon_add_4mpy(UInt(32, 0), ".vub.vub", wild_u8x, wild_u8x)}, + {"halide.hexagon.acc_add_4mpy.vuw.vub.vub", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.vub", wild_u8x, wild_u8x)}, + {"halide.hexagon.acc_add_4mpy.vw.vub.vb", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vub.vb", wild_u8x, wild_i8x)}, + {"halide.hexagon.acc_add_4mpy.vw.vb.vb", wild_i32x + halide_hexagon_add_4mpy(Int(32, 0), ".vb.vb", wild_i8x, wild_i8x)}, + + // Widening multiply-accumulates with a scalar. + {"halide.hexagon.add_mpy.vuh.vub.ub", wild_u16x + widening_mul(wild_u8x, wild_u8), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vh.vub.b", wild_i16x + widening_mul(wild_u8x, wild_i8), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vuw.vuh.uh", wild_u32x + widening_mul(wild_u16x, wild_u16), Pattern::ReinterleaveOp0}, + + // These patterns aren't exactly right because the instruction + // saturates the result. However, this is really the instruction + // that we want to use in most cases, and we can exploit the fact + // that 32 bit signed arithmetic overflow is undefined to argue + // that these patterns are not completely incorrect. + {"halide.hexagon.satw_add_mpy.vw.vh.h", wild_i32x + widening_mul(wild_i16x, wild_i16), Pattern::ReinterleaveOp0}, + + // Widening multiply-accumulates. 
+ {"halide.hexagon.add_mpy.vuh.vub.vub", wild_u16x + widening_mul(wild_u8x, wild_u8x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vuw.vuh.vuh", wild_u32x + widening_mul(wild_u16x, wild_u16x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vh.vb.vb", wild_i16x + widening_mul(wild_i8x, wild_i8x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vw.vh.vh", wild_i32x + widening_mul(wild_i16x, wild_i16x), Pattern::ReinterleaveOp0}, + + {"halide.hexagon.add_mpy.vh.vub.vb", wild_i16x + widening_mul(wild_u8x, wild_i8x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vw.vh.vuh", wild_i32x + widening_mul(wild_i16x, wild_u16x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.add_mpy.vh.vub.vb", wild_i16x + widening_mul(wild_i8x, wild_u8x), Pattern::ReinterleaveOp0 | Pattern::SwapOps12}, + {"halide.hexagon.add_mpy.vw.vh.vuh", wild_i32x + widening_mul(wild_u16x, wild_i16x), Pattern::ReinterleaveOp0 | Pattern::SwapOps12}, + + // Shift-accumulates. + {"halide.hexagon.add_shr.vw.vw.uw", wild_i32x + (wild_i32x >> wild_u32)}, + {"halide.hexagon.add_shl.vw.vw.uw", wild_i32x + (wild_i32x << wild_u32)}, + {"halide.hexagon.add_shl.vw.vw.uw", wild_u32x + (wild_u32x << wild_u32)}, + {"halide.hexagon.add_shl.vh.vh.uh", wild_i16x + (wild_i16x << wild_u16), Pattern::v65orLater}, + {"halide.hexagon.add_shl.vh.vh.uh", wild_u16x + (wild_u16x << wild_u16), Pattern::v65orLater}, + {"halide.hexagon.add_shr.vh.vh.uh", wild_i16x + (wild_i16x >> wild_u16), Pattern::v65orLater}, + {"halide.hexagon.add_shl.vh.vh.uh", wild_i16x + (wild_i16x << wild_i16), Pattern::v65orLater}, + {"halide.hexagon.add_shl.vh.vh.uh", wild_u16x + (wild_u16x << wild_u16), Pattern::v65orLater}, + + // Non-widening multiply-accumulates with a scalar. + {"halide.hexagon.add_mul.vh.vh.b", wild_i16x + widen_right_mul(wild_i16x, wild_i8)}, + {"halide.hexagon.add_mul.vw.vw.h", wild_i32x + widen_right_mul(wild_i32x, wild_i16)}, + // TODO: There's also a add_mul.vw.vw.b + + // This pattern is very general, so it must come last. + {"halide.hexagon.add_mul.vh.vh.vh", wild_i16x + wild_i16x * wild_i16x}, + }; + })(); if (op->type.is_vector()) { Expr new_expr = apply_commutative_patterns(op, adds, target, this); @@ -821,43 +826,47 @@ class OptimizePatterns : public IRMutator { } Expr visit(const Cast *op) override { - static const vector casts = { - // Halving unsigned subtract. - {"halide.hexagon.navg.vub.vub", i8(widening_sub(wild_u8x, wild_u8x) >> 1)}, - - // Narrowing casts. These may interleave later with trunclo. - {"halide.hexagon.packhi.vh", u8(wild_u16x >> 8)}, - {"halide.hexagon.packhi.vh", u8(wild_i16x >> 8)}, - {"halide.hexagon.packhi.vh", i8(wild_u16x >> 8)}, - {"halide.hexagon.packhi.vh", i8(wild_i16x >> 8)}, - {"halide.hexagon.packhi.vw", u16(wild_u32x >> 16)}, - {"halide.hexagon.packhi.vw", u16(wild_i32x >> 16)}, - {"halide.hexagon.packhi.vw", i16(wild_u32x >> 16)}, - {"halide.hexagon.packhi.vw", i16(wild_i32x >> 16)}, - - // Narrowing with shifting. - {"halide.hexagon.trunc_shr.vw.uw", i16(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, - - // Narrowing casts. These may interleave later with trunc. 
- {"halide.hexagon.pack.vh", u8(wild_u16x)}, - {"halide.hexagon.pack.vh", u8(wild_i16x)}, - {"halide.hexagon.pack.vh", i8(wild_u16x)}, - {"halide.hexagon.pack.vh", i8(wild_i16x)}, - {"halide.hexagon.pack.vw", u16(wild_u32x)}, - {"halide.hexagon.pack.vw", u16(wild_i32x)}, - {"halide.hexagon.pack.vw", i16(wild_u32x)}, - {"halide.hexagon.pack.vw", i16(wild_i32x)}, - - // Widening casts - {"halide.hexagon.zxt.vub", u16(wild_u8x), Pattern::InterleaveResult}, - {"halide.hexagon.zxt.vub", i16(wild_u8x), Pattern::InterleaveResult}, - {"halide.hexagon.zxt.vuh", u32(wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.zxt.vuh", i32(wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.sxt.vb", u16(wild_i8x), Pattern::InterleaveResult}, - {"halide.hexagon.sxt.vb", i16(wild_i8x), Pattern::InterleaveResult}, - {"halide.hexagon.sxt.vh", u32(wild_i16x), Pattern::InterleaveResult}, - {"halide.hexagon.sxt.vh", i32(wild_i16x), Pattern::InterleaveResult}, - }; + static const vector casts = ([&]() HALIDE_NEVER_INLINE { + // Initialized in a non-inlined lambda to keep the large stack frame + // off the recursive path. + return vector{ + // Halving unsigned subtract. + {"halide.hexagon.navg.vub.vub", i8(widening_sub(wild_u8x, wild_u8x) >> 1)}, + + // Narrowing casts. These may interleave later with trunclo. + {"halide.hexagon.packhi.vh", u8(wild_u16x >> 8)}, + {"halide.hexagon.packhi.vh", u8(wild_i16x >> 8)}, + {"halide.hexagon.packhi.vh", i8(wild_u16x >> 8)}, + {"halide.hexagon.packhi.vh", i8(wild_i16x >> 8)}, + {"halide.hexagon.packhi.vw", u16(wild_u32x >> 16)}, + {"halide.hexagon.packhi.vw", u16(wild_i32x >> 16)}, + {"halide.hexagon.packhi.vw", i16(wild_u32x >> 16)}, + {"halide.hexagon.packhi.vw", i16(wild_i32x >> 16)}, + + // Narrowing with shifting. + {"halide.hexagon.trunc_shr.vw.uw", i16(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, + + // Narrowing casts. These may interleave later with trunc. + {"halide.hexagon.pack.vh", u8(wild_u16x)}, + {"halide.hexagon.pack.vh", u8(wild_i16x)}, + {"halide.hexagon.pack.vh", i8(wild_u16x)}, + {"halide.hexagon.pack.vh", i8(wild_i16x)}, + {"halide.hexagon.pack.vw", u16(wild_u32x)}, + {"halide.hexagon.pack.vw", u16(wild_i32x)}, + {"halide.hexagon.pack.vw", i16(wild_u32x)}, + {"halide.hexagon.pack.vw", i16(wild_i32x)}, + + // Widening casts + {"halide.hexagon.zxt.vub", u16(wild_u8x), Pattern::InterleaveResult}, + {"halide.hexagon.zxt.vub", i16(wild_u8x), Pattern::InterleaveResult}, + {"halide.hexagon.zxt.vuh", u32(wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.zxt.vuh", i32(wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.sxt.vb", u16(wild_i8x), Pattern::InterleaveResult}, + {"halide.hexagon.sxt.vb", i16(wild_i8x), Pattern::InterleaveResult}, + {"halide.hexagon.sxt.vh", u32(wild_i16x), Pattern::InterleaveResult}, + {"halide.hexagon.sxt.vh", i32(wild_i16x), Pattern::InterleaveResult}, + }; + })(); // To hit more of the patterns we want, rewrite "double casts" // as two stage casts. This also avoids letting vector casts @@ -935,102 +944,106 @@ class OptimizePatterns : public IRMutator { } } - static const vector calls = { - // Non-widening scalar multiplication. - {"halide.hexagon.mul.vh.b", widen_right_mul(wild_i16x, wild_i8)}, - {"halide.hexagon.mul.vw.h", widen_right_mul(wild_i32x, wild_i16)}, - // TODO: There's also mul.vw.b. We currently generate mul.vw.h - // instead. I'm not sure mul.vw.b is faster, it might even be - // slower due to the extra step in broadcasting the scalar up to - // 32 bits. 
- - // One operand widening multiplication. - {"halide.hexagon.mul.vw.vh", widen_right_mul(wild_i32x, wild_i16x), Pattern::ReinterleaveOp0}, - {"halide.hexagon.mul.vw.vuh", widen_right_mul(wild_u32x, wild_u16x), Pattern::ReinterleaveOp0}, - - // Saturating narrowing casts with rounding - {"halide.hexagon.trunc_satub_rnd.vh", u8_sat(rounding_shift_right(wild_i16x, 8)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satb_rnd.vh", i8_sat(rounding_shift_right(wild_i16x, 8)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satub_rnd.vuh", u8_sat(rounding_shift_right(wild_u16x, 8)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satuh_rnd.vw", u16_sat(rounding_shift_right(wild_i32x, 16)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_sath_rnd.vw", i16_sat(rounding_shift_right(wild_i32x, 16)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satuh_rnd.vuw", u16_sat(rounding_shift_right(wild_u32x, 16)), Pattern::DeinterleaveOp0}, - - // Saturating narrowing casts with rounding - {"halide.hexagon.trunc_satub_shr_rnd.vh", u8_sat(rounding_shift_right(wild_i16x, wild_u16)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satb_shr_rnd.vh", i8_sat(rounding_shift_right(wild_i16x, wild_u16)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satub_shr_rnd.vuh", u8_sat(rounding_shift_right(wild_u16x, wild_u16)), Pattern::DeinterleaveOp0 | Pattern::v65orLater}, - {"halide.hexagon.trunc_satuh_shr_rnd.vw", u16_sat(rounding_shift_right(wild_i32x, wild_u32)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_sath_shr_rnd.vw", i16_sat(rounding_shift_right(wild_i32x, wild_u32)), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satuh_shr_rnd.vuw", u16_sat(rounding_shift_right(wild_u32x, wild_u32)), Pattern::DeinterleaveOp0}, - - // Saturating narrowing casts - {"halide.hexagon.trunc_satub_shr.vh.uh", u8_sat(wild_i16x >> wild_u16), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_satuh_shr.vw.uw", u16_sat(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, - {"halide.hexagon.trunc_sath_shr.vw.uw", i16_sat(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, - - // For some of the following narrowing casts, we have the choice of - // non-interleaving or interleaving instructions. Because we don't - // know which one we prefer during pattern matching, we match the - // non-interleaving versions for now and replace them with the - // instructions that interleave later if it makes sense. - - // Saturating narrowing casts. These may interleave later with trunc_sat. - {"halide.hexagon.pack_satub.vh", u8_sat(wild_i16x)}, - {"halide.hexagon.pack_satuh.vw", u16_sat(wild_i32x)}, - {"halide.hexagon.pack_satb.vh", i8_sat(wild_i16x)}, - {"halide.hexagon.pack_sath.vw", i16_sat(wild_i32x)}, - // The same patterns as above, but with safely reinterpreting the - // argument to be signed. - {"halide.hexagon.pack_satub.vh", u8_sat(wild_u16x), Pattern::SafeReinterpretOp0}, - {"halide.hexagon.pack_satuh.vw", u16_sat(wild_u32x), Pattern::SafeReinterpretOp0}, - {"halide.hexagon.pack_satb.vh", i8_sat(wild_u16x), Pattern::SafeReinterpretOp0}, - {"halide.hexagon.pack_sath.vw", i16_sat(wild_u32x), Pattern::SafeReinterpretOp0}, - // Slightly more expensive versions of uint saturation casts than the reinterpret - // patterns above, these perform vpack(min(UMAX, x)). - {"halide.hexagon.pack_satub.vuh", u8_sat(wild_u16x)}, - {"halide.hexagon.pack_satuh.vuw", u16_sat(wild_u32x)}, - - // We don't have a vpack equivalent to this one, so we match it directly. 
- {"halide.hexagon.trunc_satuh.vuw", u16_sat(wild_u32x), Pattern::DeinterleaveOp0}, - - // Multiply keep high half. - {"halide.hexagon.trunc_mpy.vw.vw", mul_shift_right(wild_i32x, wild_i32x, 32)}, - - // Scalar multiply keep high half, with multiplication by 2. - {"halide.hexagon.trunc_satw_mpy2.vh.h", mul_shift_right(wild_i16x, wild_i16, 15)}, - {"halide.hexagon.trunc_satw_mpy2.vh.h", mul_shift_right(wild_i16, wild_i16x, 15), Pattern::SwapOps01}, - {"halide.hexagon.trunc_satdw_mpy2.vw.vw", mul_shift_right(wild_i32x, wild_i32x, 31)}, - - // Scalar and vector multiply keep high half, with multiplication by 2, and rounding. - {"halide.hexagon.trunc_satw_mpy2_rnd.vh.h", rounding_mul_shift_right(wild_i16x, wild_i16, 15)}, - {"halide.hexagon.trunc_satw_mpy2_rnd.vh.h", rounding_mul_shift_right(wild_i16, wild_i16x, 15), Pattern::SwapOps01}, - {"halide.hexagon.trunc_satw_mpy2_rnd.vh.vh", rounding_mul_shift_right(wild_i16x, wild_i16x, 15)}, - {"halide.hexagon.trunc_satdw_mpy2_rnd.vw.vw", rounding_mul_shift_right(wild_i32x, wild_i32x, 31)}, - - // Vector by scalar widening multiplies. These need to happen before the ones below, to avoid - // using vector versions when scalar versions would suffice. - {"halide.hexagon.mpy.vub.ub", widening_mul(wild_u8x, wild_u8), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vub.b", widening_mul(wild_u8x, wild_i8), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vuh.uh", widening_mul(wild_u16x, wild_u16), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vh.h", widening_mul(wild_i16x, wild_i16), Pattern::InterleaveResult}, - - // These are calls that are almost trivial, but they differ due to interleaving. - {"halide.hexagon.add_vuh.vub.vub", widening_add(wild_u8x, wild_u8x), Pattern::InterleaveResult}, - {"halide.hexagon.add_vuw.vuh.vuh", widening_add(wild_u16x, wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.add_vw.vh.vh", widening_add(wild_i16x, wild_i16x), Pattern::InterleaveResult}, - {"halide.hexagon.sub_vh.vub.vub", widening_sub(wild_u8x, wild_u8x), Pattern::InterleaveResult}, - {"halide.hexagon.sub_vw.vuh.vuh", widening_sub(wild_u16x, wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.sub_vw.vh.vh", widening_sub(wild_i16x, wild_i16x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vub.vub", widening_mul(wild_u8x, wild_u8x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vub.vb", widening_mul(wild_u8x, wild_i8x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vub.vb", widening_mul(wild_i8x, wild_u8x), Pattern::InterleaveResult | Pattern::SwapOps01}, - {"halide.hexagon.mpy.vb.vb", widening_mul(wild_i8x, wild_i8x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vuh.vuh", widening_mul(wild_u16x, wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vh.vh", widening_mul(wild_i16x, wild_i16x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vh.vuh", widening_mul(wild_i16x, wild_u16x), Pattern::InterleaveResult}, - {"halide.hexagon.mpy.vh.vuh", widening_mul(wild_u16x, wild_i16x), Pattern::InterleaveResult | Pattern::SwapOps01}, - }; + static const vector calls = ([&]() HALIDE_NEVER_INLINE { + // Initialized in a non-inlined lambda to keep the large stack frame + // off the recursive path. + return vector{ + // Non-widening scalar multiplication. + {"halide.hexagon.mul.vh.b", widen_right_mul(wild_i16x, wild_i8)}, + {"halide.hexagon.mul.vw.h", widen_right_mul(wild_i32x, wild_i16)}, + // TODO: There's also mul.vw.b. We currently generate mul.vw.h + // instead. 
I'm not sure mul.vw.b is faster, it might even be + // slower due to the extra step in broadcasting the scalar up to + // 32 bits. + + // One operand widening multiplication. + {"halide.hexagon.mul.vw.vh", widen_right_mul(wild_i32x, wild_i16x), Pattern::ReinterleaveOp0}, + {"halide.hexagon.mul.vw.vuh", widen_right_mul(wild_u32x, wild_u16x), Pattern::ReinterleaveOp0}, + + // Saturating narrowing casts with rounding + {"halide.hexagon.trunc_satub_rnd.vh", u8_sat(rounding_shift_right(wild_i16x, 8)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satb_rnd.vh", i8_sat(rounding_shift_right(wild_i16x, 8)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satub_rnd.vuh", u8_sat(rounding_shift_right(wild_u16x, 8)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satuh_rnd.vw", u16_sat(rounding_shift_right(wild_i32x, 16)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_sath_rnd.vw", i16_sat(rounding_shift_right(wild_i32x, 16)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satuh_rnd.vuw", u16_sat(rounding_shift_right(wild_u32x, 16)), Pattern::DeinterleaveOp0}, + + // Saturating narrowing casts with rounding + {"halide.hexagon.trunc_satub_shr_rnd.vh", u8_sat(rounding_shift_right(wild_i16x, wild_u16)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satb_shr_rnd.vh", i8_sat(rounding_shift_right(wild_i16x, wild_u16)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satub_shr_rnd.vuh", u8_sat(rounding_shift_right(wild_u16x, wild_u16)), Pattern::DeinterleaveOp0 | Pattern::v65orLater}, + {"halide.hexagon.trunc_satuh_shr_rnd.vw", u16_sat(rounding_shift_right(wild_i32x, wild_u32)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_sath_shr_rnd.vw", i16_sat(rounding_shift_right(wild_i32x, wild_u32)), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satuh_shr_rnd.vuw", u16_sat(rounding_shift_right(wild_u32x, wild_u32)), Pattern::DeinterleaveOp0}, + + // Saturating narrowing casts + {"halide.hexagon.trunc_satub_shr.vh.uh", u8_sat(wild_i16x >> wild_u16), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_satuh_shr.vw.uw", u16_sat(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, + {"halide.hexagon.trunc_sath_shr.vw.uw", i16_sat(wild_i32x >> wild_u32), Pattern::DeinterleaveOp0}, + + // For some of the following narrowing casts, we have the choice of + // non-interleaving or interleaving instructions. Because we don't + // know which one we prefer during pattern matching, we match the + // non-interleaving versions for now and replace them with the + // instructions that interleave later if it makes sense. + + // Saturating narrowing casts. These may interleave later with trunc_sat. + {"halide.hexagon.pack_satub.vh", u8_sat(wild_i16x)}, + {"halide.hexagon.pack_satuh.vw", u16_sat(wild_i32x)}, + {"halide.hexagon.pack_satb.vh", i8_sat(wild_i16x)}, + {"halide.hexagon.pack_sath.vw", i16_sat(wild_i32x)}, + // The same patterns as above, but with safely reinterpreting the + // argument to be signed. + {"halide.hexagon.pack_satub.vh", u8_sat(wild_u16x), Pattern::SafeReinterpretOp0}, + {"halide.hexagon.pack_satuh.vw", u16_sat(wild_u32x), Pattern::SafeReinterpretOp0}, + {"halide.hexagon.pack_satb.vh", i8_sat(wild_u16x), Pattern::SafeReinterpretOp0}, + {"halide.hexagon.pack_sath.vw", i16_sat(wild_u32x), Pattern::SafeReinterpretOp0}, + // Slightly more expensive versions of uint saturation casts than the reinterpret + // patterns above, these perform vpack(min(UMAX, x)). 
+ {"halide.hexagon.pack_satub.vuh", u8_sat(wild_u16x)}, + {"halide.hexagon.pack_satuh.vuw", u16_sat(wild_u32x)}, + + // We don't have a vpack equivalent to this one, so we match it directly. + {"halide.hexagon.trunc_satuh.vuw", u16_sat(wild_u32x), Pattern::DeinterleaveOp0}, + + // Multiply keep high half. + {"halide.hexagon.trunc_mpy.vw.vw", mul_shift_right(wild_i32x, wild_i32x, 32)}, + + // Scalar multiply keep high half, with multiplication by 2. + {"halide.hexagon.trunc_satw_mpy2.vh.h", mul_shift_right(wild_i16x, wild_i16, 15)}, + {"halide.hexagon.trunc_satw_mpy2.vh.h", mul_shift_right(wild_i16, wild_i16x, 15), Pattern::SwapOps01}, + {"halide.hexagon.trunc_satdw_mpy2.vw.vw", mul_shift_right(wild_i32x, wild_i32x, 31)}, + + // Scalar and vector multiply keep high half, with multiplication by 2, and rounding. + {"halide.hexagon.trunc_satw_mpy2_rnd.vh.h", rounding_mul_shift_right(wild_i16x, wild_i16, 15)}, + {"halide.hexagon.trunc_satw_mpy2_rnd.vh.h", rounding_mul_shift_right(wild_i16, wild_i16x, 15), Pattern::SwapOps01}, + {"halide.hexagon.trunc_satw_mpy2_rnd.vh.vh", rounding_mul_shift_right(wild_i16x, wild_i16x, 15)}, + {"halide.hexagon.trunc_satdw_mpy2_rnd.vw.vw", rounding_mul_shift_right(wild_i32x, wild_i32x, 31)}, + + // Vector by scalar widening multiplies. These need to happen before the ones below, to avoid + // using vector versions when scalar versions would suffice. + {"halide.hexagon.mpy.vub.ub", widening_mul(wild_u8x, wild_u8), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vub.b", widening_mul(wild_u8x, wild_i8), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vuh.uh", widening_mul(wild_u16x, wild_u16), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vh.h", widening_mul(wild_i16x, wild_i16), Pattern::InterleaveResult}, + + // These are calls that are almost trivial, but they differ due to interleaving. + {"halide.hexagon.add_vuh.vub.vub", widening_add(wild_u8x, wild_u8x), Pattern::InterleaveResult}, + {"halide.hexagon.add_vuw.vuh.vuh", widening_add(wild_u16x, wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.add_vw.vh.vh", widening_add(wild_i16x, wild_i16x), Pattern::InterleaveResult}, + {"halide.hexagon.sub_vh.vub.vub", widening_sub(wild_u8x, wild_u8x), Pattern::InterleaveResult}, + {"halide.hexagon.sub_vw.vuh.vuh", widening_sub(wild_u16x, wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.sub_vw.vh.vh", widening_sub(wild_i16x, wild_i16x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vub.vub", widening_mul(wild_u8x, wild_u8x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vub.vb", widening_mul(wild_u8x, wild_i8x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vub.vb", widening_mul(wild_i8x, wild_u8x), Pattern::InterleaveResult | Pattern::SwapOps01}, + {"halide.hexagon.mpy.vb.vb", widening_mul(wild_i8x, wild_i8x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vuh.vuh", widening_mul(wild_u16x, wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vh.vh", widening_mul(wild_i16x, wild_i16x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vh.vuh", widening_mul(wild_i16x, wild_u16x), Pattern::InterleaveResult}, + {"halide.hexagon.mpy.vh.vuh", widening_mul(wild_u16x, wild_i16x), Pattern::InterleaveResult | Pattern::SwapOps01}, + }; + })(); // To hit more of the patterns we want, rewrite "double casts" // as two stage casts. 
This also avoids letting vector casts @@ -1187,63 +1200,66 @@ class VectorReducePatterns : public IRMutator { int factor = in_lanes / out_lanes; // Map of instruction signatures - // clang-format off - static const vector sigs = { - // --------- vrmpy --------- - // Sliding window - {4, 32, widening_mul(wild_u8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB}, - {4, 32, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, - {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, - // Vector * Scalar - {4, 32, widening_mul(wild_u8x, wild_u8x), Signature::ScalarB}, - {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB}, - {4, 32, widening_mul(wild_u8x, wild_i8x), Signature::ScalarB}, - {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB | Signature::SwapOps}, - // Vector * Vector - {4, 32, widening_mul(wild_u8x, wild_u8x)}, - {4, 32, widening_mul(wild_u8x, wild_i8x)}, - {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::SwapOps}, - {4, 32, widening_mul(wild_i8x, wild_i8x)}, - // Sum - {4, 32, wild_u8x, Signature::SlidingWindow}, - {4, 32, wild_i8x, Signature::SlidingWindow}, - {4, 32, wild_u8x}, - {4, 32, wild_i8x}, - - // --------- vtmpy --------- - // Vtmpy has additional requirement that third coefficient b[2] - // needs to be 1. - // Sliding window - {3, 16, widening_mul(wild_i8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, - {3, 16, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, - {3, 16, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, - {3, 32, widening_mul(wild_i16x, wild_i16x), Signature::SlidingWindow | Signature::ScalarB}, - // Sum - {3, 16, wild_i8x, Signature::SlidingWindow}, - {3, 16, wild_u8x, Signature::SlidingWindow}, - {3, 32, wild_i16x, Signature::SlidingWindow}, - - // --------- vdmpy --------- - // Sliding window - {2, 16, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, - {2, 16, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, - {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::SlidingWindow | Signature::ScalarB}, - // Vector * Scalar - {2, 16, widening_mul(wild_u8x, wild_i8x), Signature::ScalarB}, - {2, 16, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB | Signature::SwapOps}, - {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::ScalarB | Signature::NarrowB}, - {2, 32, widening_mul(wild_i16x, wild_u16x), Signature::ScalarB}, // Saturates - {2, 32, widening_mul(wild_u16x, wild_i16x), Signature::ScalarB | Signature::SwapOps}, // Saturates - {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::ScalarB}, // Saturates - // Vector * Vector - {2, 32, widening_mul(wild_i16x, wild_i16x)}, // Saturates - // Sum - {2, 16, wild_u8x, Signature::SlidingWindow}, - {2, 32, wild_i16x, Signature::SlidingWindow}, - {2, 16, wild_u8x}, - {2, 32, wild_i16x}, - }; - // clang-format on + static const vector sigs = ([&]() HALIDE_NEVER_INLINE { + return vector{ + // clang-format off + // --------- vrmpy --------- + // Sliding window + {4, 32, widening_mul(wild_u8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB}, + {4, 32, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, + {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, + // Vector * Scalar + {4, 32, widening_mul(wild_u8x, 
wild_u8x), Signature::ScalarB}, + {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB}, + {4, 32, widening_mul(wild_u8x, wild_i8x), Signature::ScalarB}, + {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB | Signature::SwapOps}, + // Vector * Vector + {4, 32, widening_mul(wild_u8x, wild_u8x)}, + {4, 32, widening_mul(wild_u8x, wild_i8x)}, + {4, 32, widening_mul(wild_i8x, wild_u8x), Signature::SwapOps}, + {4, 32, widening_mul(wild_i8x, wild_i8x)}, + // Sum + {4, 32, wild_u8x, Signature::SlidingWindow}, + {4, 32, wild_i8x, Signature::SlidingWindow}, + {4, 32, wild_u8x}, + {4, 32, wild_i8x}, + + // --------- vtmpy --------- + // Vtmpy has additional requirement that third coefficient b[2] + // needs to be 1. + // Sliding window + {3, 16, widening_mul(wild_i8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, + {3, 16, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, + {3, 16, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, + {3, 32, widening_mul(wild_i16x, wild_i16x), Signature::SlidingWindow | Signature::ScalarB}, + // Sum + {3, 16, wild_i8x, Signature::SlidingWindow}, + {3, 16, wild_u8x, Signature::SlidingWindow}, + {3, 32, wild_i16x, Signature::SlidingWindow}, + + // --------- vdmpy --------- + // Sliding window + {2, 16, widening_mul(wild_u8x, wild_i8x), Signature::SlidingWindow | Signature::ScalarB}, + {2, 16, widening_mul(wild_i8x, wild_u8x), Signature::SlidingWindow | Signature::ScalarB | Signature::SwapOps}, + {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::SlidingWindow | Signature::ScalarB}, + // Vector * Scalar + {2, 16, widening_mul(wild_u8x, wild_i8x), Signature::ScalarB}, + {2, 16, widening_mul(wild_i8x, wild_u8x), Signature::ScalarB | Signature::SwapOps}, + {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::ScalarB | Signature::NarrowB}, + {2, 32, widening_mul(wild_i16x, wild_u16x), Signature::ScalarB}, // Saturates + {2, 32, widening_mul(wild_u16x, wild_i16x), Signature::ScalarB | Signature::SwapOps}, // Saturates + {2, 32, widening_mul(wild_i16x, wild_i16x), Signature::ScalarB}, // Saturates + // Vector * Vector + {2, 32, widening_mul(wild_i16x, wild_i16x)}, // Saturates + // Sum + {2, 16, wild_u8x, Signature::SlidingWindow}, + {2, 32, wild_i16x, Signature::SlidingWindow}, + {2, 16, wild_u8x}, + {2, 32, wild_i16x}, + }; + // clang-format on + })(); + std::vector matches; for (const Signature &sig : sigs) { if (factor != sig.factor) { diff --git a/src/IRMatch.h b/src/IRMatch.h index 4ba3ed41ccfb..afc264582651 100644 --- a/src/IRMatch.h +++ b/src/IRMatch.h @@ -1325,15 +1325,30 @@ constexpr int const_min(int a, int b) { return a < b ? a : b; } -template +template +struct OptionalIntrinType { + bool check(const Type &) const { + return true; + } +}; + +template<> +struct OptionalIntrinType { + halide_type_t type; + bool check(const Type &t) const { + return t == Type(type); + } +}; + +template struct Intrin { struct pattern_tag {}; - Call::IntrinsicOp intrin; std::tuple args; // The type of the output of the intrinsic node. // Only necessary in cases where it can't be inferred // from the input types (e.g. saturating_cast). 
- Type optional_type_hint; + + OptionalIntrinType optional_type_hint; static constexpr uint32_t binds = bitwise_or_reduce((bindings::mask)...); @@ -1362,7 +1377,7 @@ struct Intrin { } const Call &c = (const Call &)e; return (c.is_intrinsic(intrin) && - ((optional_type_hint == Type()) || optional_type_hint == e.type) && + optional_type_hint.check(e.type) && match_args<0, bound>(0, c, state)); } @@ -1394,8 +1409,8 @@ struct Intrin { return likely_if_innermost(std::move(arg0)); } else if (intrin == Call::abs) { return abs(std::move(arg0)); - } else if (intrin == Call::saturating_cast) { - return saturating_cast(optional_type_hint, std::move(arg0)); + } else if constexpr (intrin == Call::saturating_cast) { + return saturating_cast(optional_type_hint.type, std::move(arg0)); } Expr arg1 = std::get(args).make(state, type_hint); @@ -1489,98 +1504,113 @@ struct Intrin { } HALIDE_ALWAYS_INLINE - Intrin(Call::IntrinsicOp intrin, Args... args) noexcept - : intrin(intrin), args(args...) { + Intrin(Args... args) noexcept + : args(args...) { } }; -template -std::ostream &operator<<(std::ostream &s, const Intrin &op) { - s << op.intrin << "("; +template +std::ostream &operator<<(std::ostream &s, const Intrin &op) { + s << intrin << "("; op.print_args(s); s << ")"; return s; } -template -HALIDE_ALWAYS_INLINE auto intrin(Call::IntrinsicOp intrinsic_op, Args... args) noexcept -> Intrin { - return {intrinsic_op, pattern_arg(args)...}; -} - template -auto widen_right_add(A &&a, B &&b) noexcept -> Intrin { - return {Call::widen_right_add, pattern_arg(a), pattern_arg(b)}; +auto widen_right_add(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto widen_right_mul(A &&a, B &&b) noexcept -> Intrin { - return {Call::widen_right_mul, pattern_arg(a), pattern_arg(b)}; +auto widen_right_mul(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto widen_right_sub(A &&a, B &&b) noexcept -> Intrin { - return {Call::widen_right_sub, pattern_arg(a), pattern_arg(b)}; +auto widen_right_sub(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto widening_add(A &&a, B &&b) noexcept -> Intrin { - return {Call::widening_add, pattern_arg(a), pattern_arg(b)}; +auto widening_add(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto widening_sub(A &&a, B &&b) noexcept -> Intrin { - return {Call::widening_sub, pattern_arg(a), pattern_arg(b)}; +auto widening_sub(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto widening_mul(A &&a, B &&b) noexcept -> Intrin { - return {Call::widening_mul, pattern_arg(a), pattern_arg(b)}; +auto widening_mul(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto saturating_add(A &&a, B &&b) noexcept -> Intrin { - return {Call::saturating_add, pattern_arg(a), pattern_arg(b)}; +auto saturating_add(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto saturating_sub(A &&a, B &&b) noexcept -> Intrin { - return {Call::saturating_sub, pattern_arg(a), pattern_arg(b)}; +auto saturating_sub(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto saturating_cast(const Type &t, A &&a) noexcept -> Intrin { - Intrin p = {Call::saturating_cast, pattern_arg(a)}; - p.optional_type_hint = t; +auto saturating_cast(const Type &t, A &&a) noexcept -> Intrin { + Intrin p = {pattern_arg(a)}; + 
p.optional_type_hint.type = t; return p; } template -auto halving_add(A &&a, B &&b) noexcept -> Intrin { - return {Call::halving_add, pattern_arg(a), pattern_arg(b)}; +auto halving_add(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto halving_sub(A &&a, B &&b) noexcept -> Intrin { - return {Call::halving_sub, pattern_arg(a), pattern_arg(b)}; +auto halving_sub(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto rounding_halving_add(A &&a, B &&b) noexcept -> Intrin { - return {Call::rounding_halving_add, pattern_arg(a), pattern_arg(b)}; +auto rounding_halving_add(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto shift_left(A &&a, B &&b) noexcept -> Intrin { - return {Call::shift_left, pattern_arg(a), pattern_arg(b)}; +auto shift_left(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto shift_right(A &&a, B &&b) noexcept -> Intrin { - return {Call::shift_right, pattern_arg(a), pattern_arg(b)}; +auto shift_right(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto rounding_shift_left(A &&a, B &&b) noexcept -> Intrin { - return {Call::rounding_shift_left, pattern_arg(a), pattern_arg(b)}; +auto rounding_shift_left(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto rounding_shift_right(A &&a, B &&b) noexcept -> Intrin { - return {Call::rounding_shift_right, pattern_arg(a), pattern_arg(b)}; +auto rounding_shift_right(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; } template -auto mul_shift_right(A &&a, B &&b, C &&c) noexcept -> Intrin { - return {Call::mul_shift_right, pattern_arg(a), pattern_arg(b), pattern_arg(c)}; +auto mul_shift_right(A &&a, B &&b, C &&c) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b), pattern_arg(c)}; } template -auto rounding_mul_shift_right(A &&a, B &&b, C &&c) noexcept -> Intrin { - return {Call::rounding_mul_shift_right, pattern_arg(a), pattern_arg(b), pattern_arg(c)}; +auto rounding_mul_shift_right(A &&a, B &&b, C &&c) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b), pattern_arg(c)}; +} + +template +auto abs(A &&a) noexcept -> Intrin { + return {pattern_arg(a)}; +} + +template +auto absd(A &&a, B &&b) noexcept -> Intrin { + return {pattern_arg(a), pattern_arg(b)}; +} + +template +auto likely(A &&a) noexcept -> Intrin { + return {pattern_arg(a)}; +} + +template +auto likely_if_innermost(A &&a) noexcept -> Intrin { + return {pattern_arg(a)}; } template @@ -2425,7 +2455,8 @@ template struct IsInt { struct pattern_tag {}; A a; - int bits, lanes; + uint8_t bits; + uint16_t lanes; constexpr static uint32_t binds = bindings::mask; @@ -2448,7 +2479,7 @@ struct IsInt { }; template -HALIDE_ALWAYS_INLINE auto is_int(A &&a, int bits = 0, int lanes = 0) noexcept -> IsInt { +HALIDE_ALWAYS_INLINE auto is_int(A &&a, uint8_t bits = 0, uint16_t lanes = 0) noexcept -> IsInt { assert_is_lvalue_if_expr(); return {pattern_arg(a), bits, lanes}; } @@ -2470,7 +2501,8 @@ template struct IsUInt { struct pattern_tag {}; A a; - int bits, lanes; + uint8_t bits; + uint16_t lanes; constexpr static uint32_t binds = bindings::mask; @@ -2493,7 +2525,7 @@ struct IsUInt { }; template -HALIDE_ALWAYS_INLINE auto is_uint(A &&a, int bits = 0, int lanes = 0) noexcept -> IsUInt { +HALIDE_ALWAYS_INLINE auto is_uint(A &&a, uint8_t bits = 0, uint16_t lanes = 0) noexcept -> IsUInt { 
assert_is_lvalue_if_expr(); return {pattern_arg(a), bits, lanes}; } diff --git a/src/IRPrinter.cpp b/src/IRPrinter.cpp index 73ef8652c144..741775f26420 100644 --- a/src/IRPrinter.cpp +++ b/src/IRPrinter.cpp @@ -594,6 +594,11 @@ void IRPrinter::visit(const UIntImm *op) { } void IRPrinter::visit(const FloatImm *op) { + const bool use_scientific_format = (op->value != 0.0) && (std::log10(std::abs(op->value)) < -6); + if (use_scientific_format) { + stream << std::scientific; + } + switch (op->type.bits()) { case 64: stream << op->value; @@ -607,6 +612,10 @@ void IRPrinter::visit(const FloatImm *op) { default: internal_error << "Bad bit-width for float: " << op->type << "\n"; } + + if (use_scientific_format) { + stream << std::fixed; + } } void IRPrinter::visit(const StringImm *op) { diff --git a/src/Simplify_Max.cpp b/src/Simplify_Max.cpp index 6f3ecc1999f7..a4893787efbb 100644 --- a/src/Simplify_Max.cpp +++ b/src/Simplify_Max.cpp @@ -91,10 +91,10 @@ Expr Simplify::visit(const Max *op, ExprInfo *info) { rewrite(max(select(x, w, max(z, y)), z), max(select(x, w, y), z)) || rewrite(max(select(x, w, max(z, y)), y), max(select(x, w, z), y)) || - rewrite(max(intrin(Call::likely, x), x), b) || - rewrite(max(x, intrin(Call::likely, x)), a) || - rewrite(max(intrin(Call::likely_if_innermost, x), x), b) || - rewrite(max(x, intrin(Call::likely_if_innermost, x)), a) || + rewrite(max(likely(x), x), b) || + rewrite(max(x, likely(x)), a) || + rewrite(max(likely_if_innermost(x), x), b) || + rewrite(max(x, likely_if_innermost(x)), a) || (no_overflow(op->type) && (rewrite(max(ramp(x, y, lanes), broadcast(z, lanes)), a, diff --git a/src/Simplify_Min.cpp b/src/Simplify_Min.cpp index beda5919900b..227a966cc7c4 100644 --- a/src/Simplify_Min.cpp +++ b/src/Simplify_Min.cpp @@ -92,10 +92,10 @@ Expr Simplify::visit(const Min *op, ExprInfo *info) { rewrite(min(select(x, w, min(z, y)), y), min(select(x, w, z), y)) || rewrite(min(select(x, w, min(z, y)), z), min(select(x, w, y), z)) || - rewrite(min(intrin(Call::likely, x), x), b) || - rewrite(min(x, intrin(Call::likely, x)), a) || - rewrite(min(intrin(Call::likely_if_innermost, x), x), b) || - rewrite(min(x, intrin(Call::likely_if_innermost, x)), a) || + rewrite(min(likely(x), x), b) || + rewrite(min(x, likely(x)), a) || + rewrite(min(likely_if_innermost(x), x), b) || + rewrite(min(x, likely_if_innermost(x)), a) || (no_overflow(op->type) && (rewrite(min(ramp(x, y, lanes), broadcast(z, lanes)), a, diff --git a/src/Simplify_Not.cpp b/src/Simplify_Not.cpp index 47b74661fd2c..1c855ae82dd9 100644 --- a/src/Simplify_Not.cpp +++ b/src/Simplify_Not.cpp @@ -18,8 +18,8 @@ Expr Simplify::visit(const Not *op, ExprInfo *info) { } if (rewrite(!broadcast(x, c0), broadcast(!x, c0)) || - rewrite(!intrin(Call::likely, x), intrin(Call::likely, !x)) || - rewrite(!intrin(Call::likely_if_innermost, x), intrin(Call::likely_if_innermost, !x)) || + rewrite(!likely(x), likely(!x)) || + rewrite(!likely_if_innermost(x), likely_if_innermost(!x)) || rewrite(!(!x && y), x || !y) || rewrite(!(!x || y), x && !y) || rewrite(!(x && !y), !x || y) || diff --git a/src/Simplify_Select.cpp b/src/Simplify_Select.cpp index 43da853b3e0a..1e778c229b95 100644 --- a/src/Simplify_Select.cpp +++ b/src/Simplify_Select.cpp @@ -21,17 +21,17 @@ Expr Simplify::visit(const Select *op, ExprInfo *info) { // clang-format off if (EVAL_IN_LAMBDA - (rewrite(select(IRMatcher::intrin(Call::likely, true), x, y), x) || - rewrite(select(IRMatcher::intrin(Call::likely, false), x, y), y) || - 
rewrite(select(IRMatcher::intrin(Call::likely_if_innermost, true), x, y), x) || - rewrite(select(IRMatcher::intrin(Call::likely_if_innermost, false), x, y), y) || + (rewrite(select(IRMatcher::likely(true), x, y), x) || + rewrite(select(IRMatcher::likely(false), x, y), y) || + rewrite(select(IRMatcher::likely_if_innermost(true), x, y), x) || + rewrite(select(IRMatcher::likely_if_innermost(false), x, y), y) || rewrite(select(1, x, y), x) || rewrite(select(0, x, y), y) || rewrite(select(x, y, y), y) || - rewrite(select(x, intrin(Call::likely, y), y), false_value) || - rewrite(select(x, y, intrin(Call::likely, y)), true_value) || - rewrite(select(x, intrin(Call::likely_if_innermost, y), y), false_value) || - rewrite(select(x, y, intrin(Call::likely_if_innermost, y)), true_value) || + rewrite(select(x, likely(y), y), false_value) || + rewrite(select(x, y, likely(y)), true_value) || + rewrite(select(x, likely_if_innermost(y), y), false_value) || + rewrite(select(x, y, likely_if_innermost(y)), true_value) || false)) { return rewrite.result; } diff --git a/src/SpirvIR.cpp b/src/SpirvIR.cpp index 761865d76b14..aae2ecc4b9e3 100644 --- a/src/SpirvIR.cpp +++ b/src/SpirvIR.cpp @@ -1,5 +1,6 @@ #include "SpirvIR.h" #include +#include #ifdef WITH_SPIRV @@ -1864,8 +1865,19 @@ SpvId SpvBuilder::declare_scalar_constant_of_type(const Type &scalar_type, const << "type=" << scalar_type << " " << "data=" << stringify_constant(value) << "\n"; - SpvInstruction inst = SpvFactory::constant(result_id, type_id, scalar_type.bytes(), &value, value_type); - module.add_constant(inst); + // small signed integers have to be sign extended to fit 32-bits + if constexpr (std::is_same::value) { + uint32_t sext_value = uint32_t(value); + SpvInstruction inst = SpvFactory::constant(result_id, type_id, sizeof(uint32_t), &sext_value, value_type); + module.add_constant(inst); + } else if constexpr (std::is_same::value) { + uint32_t sext_value = uint32_t(value); + SpvInstruction inst = SpvFactory::constant(result_id, type_id, sizeof(uint32_t), &sext_value, value_type); + module.add_constant(inst); + } else { + SpvInstruction inst = SpvFactory::constant(result_id, type_id, scalar_type.bytes(), &value, value_type); + module.add_constant(inst); + } constant_map[constant_key] = result_id; return result_id; } diff --git a/src/SplitTuples.cpp b/src/SplitTuples.cpp index b0c9a378b3ba..ee805634c151 100644 --- a/src/SplitTuples.cpp +++ b/src/SplitTuples.cpp @@ -36,29 +36,6 @@ class FindCallValueIndices : public IRVisitor { } }; -// Visitor and helper function to test if a piece of IR uses an extern image. -class UsesExternImage : public IRVisitor { - using IRVisitor::visit; - - void visit(const Call *c) override { - if (c->call_type == Call::Image) { - result = true; - } else { - IRVisitor::visit(c); - } - } - -public: - UsesExternImage() = default; - bool result = false; -}; - -inline bool uses_extern_image(const Stmt &s) { - UsesExternImage uses; - s.accept(&uses); - return uses.result; -} - class SplitTuples : public IRMutator { using IRMutator::visit; diff --git a/src/runtime/CMakeLists.txt b/src/runtime/CMakeLists.txt index aa17db86f4f9..e5ea0ce7c293 100644 --- a/src/runtime/CMakeLists.txt +++ b/src/runtime/CMakeLists.txt @@ -1,3 +1,9 @@ +find_package( + VulkanHeaders 1.3.296 REQUIRED + HINTS "${Halide_SOURCE_DIR}/dependencies/vulkan" +) +get_target_property(VulkanHeaders_INCLUDE_DIR Vulkan::Headers INTERFACE_INCLUDE_DIRECTORIES) + # Keep these lists in alphabetical order. 
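Regarding the SpirvIR.cpp hunk above, which routes int8_t and int16_t constants through uint32_t: SPIR-V encodes scalar constants in 32-bit words, so a negative narrow constant must be sign-extended before being stored; zero-extending the raw bytes would change its value. A small standalone illustration of that conversion behaviour (plain C++, not Halide or SPIR-V API code):

#include <cassert>
#include <cstdint>

int main() {
    int16_t v = -3;

    // Converting via the signed value sign-extends, so the 32-bit word
    // holds the two's-complement pattern for -3.
    uint32_t word = uint32_t(v);
    assert(word == 0xFFFFFFFDu);

    // Zero-extending the raw 16-bit pattern instead would not preserve
    // the value: it yields 65533, not -3.
    uint32_t zext = uint32_t(uint16_t(v));
    assert(zext == 0x0000FFFDu);

    // Reading the word back as a signed 32-bit value recovers -3
    // (two's-complement representation assumed).
    assert(int32_t(word) == -3);
    return 0;
}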
set(RUNTIME_CPP aarch64_cpu_features @@ -177,6 +183,7 @@ set(RUNTIME_CXX_FLAGS -Wsign-compare -Wimplicit-fallthrough -Wno-sync-alignment + -isystem "${VulkanHeaders_INCLUDE_DIR}" ) Halide_feature( diff --git a/src/runtime/HalideRuntime.h b/src/runtime/HalideRuntime.h index fb0a1dbe87f8..d3fa4b79114b 100644 --- a/src/runtime/HalideRuntime.h +++ b/src/runtime/HalideRuntime.h @@ -23,7 +23,7 @@ // our CMake build, so that we ensure that the in-build metadata (eg soversion) // matches, but keeping the canonical version here makes it easier to keep // downstream build systems (eg Blaze/Bazel) properly in sync with the source. -#define HALIDE_VERSION_MAJOR 19 +#define HALIDE_VERSION_MAJOR 20 #define HALIDE_VERSION_MINOR 0 #define HALIDE_VERSION_PATCH 0 diff --git a/src/runtime/HalideRuntimeVulkan.h b/src/runtime/HalideRuntimeVulkan.h index 4e52da4fb034..e150b7c6d00b 100644 --- a/src/runtime/HalideRuntimeVulkan.h +++ b/src/runtime/HalideRuntimeVulkan.h @@ -17,6 +17,36 @@ extern "C" { #define HALIDE_RUNTIME_VULKAN +// Guard against redefining handles if vulkan.h was included elsewhere +#ifndef VK_DEFINE_HANDLE + +#define HALIDE_VULKAN_DEFINE_HANDLE(object) typedef struct object##_T *(object); + +#ifndef HALIDE_VULKAN_USE_64_BIT_PTR_DEFINES +#if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__)) || defined(_M_X64) || defined(__ia64) || defined(_M_IA64) || defined(__aarch64__) || defined(__powerpc64__) || (defined(__riscv) && __riscv_xlen == 64) +#define HALIDE_VULKAN_USE_64_BIT_PTR_DEFINES 1 +#else +#define HALIDE_VULKAN_USE_64_BIT_PTR_DEFINES 0 +#endif +#endif + +#ifndef HALIDE_VULKAN_DEFINE_NON_DISPATCHABLE_HANDLE +#if (HALIDE_VULKAN_USE_64_BIT_PTR_DEFINES == 1) +#define HALIDE_VULKAN_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *(object); +#else +#define HALIDE_VULKAN_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t(object); +#endif +#endif + +HALIDE_VULKAN_DEFINE_HANDLE(VkInstance) +HALIDE_VULKAN_DEFINE_HANDLE(VkPhysicalDevice) +HALIDE_VULKAN_DEFINE_HANDLE(VkDevice) +HALIDE_VULKAN_DEFINE_HANDLE(VkQueue) +HALIDE_VULKAN_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) +HALIDE_VULKAN_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT) + +#endif + extern const struct halide_device_interface_t *halide_vulkan_device_interface(); /** These are forward declared here to allow clients to override the @@ -56,26 +86,25 @@ extern void halide_vulkan_finalize_kernels(void *user_context, void *state_ptr); // instance: the vulkan instance handle // device: the vulkan device handle // physical_device: the vulkan physical device handle -// command_pool: the vulkan command pool handle (strangely doesn't have a VkCommandPool_T typedef) // queue: the vulkan queue handle // queue_family_index: the index corresponding to the device queue properties for the device (as described by vkGetPhysicalDeviceQueueFamilyProperties) // create: if set to true, attempt to create a new vulkan context, otherwise acquire the current one struct halide_vulkan_memory_allocator; extern int halide_vulkan_acquire_context(void *user_context, struct halide_vulkan_memory_allocator **allocator, - struct VkInstance_T **instance, - struct VkDevice_T **device, - struct VkPhysicalDevice_T **physical_device, - uint64_t *command_pool, - struct VkQueue_T **queue, + VkInstance *instance, + VkDevice *device, + VkPhysicalDevice *physical_device, + VkQueue *queue, uint32_t *queue_family_index, + VkDebugUtilsMessengerEXT *messenger, bool create = true); extern int 
halide_vulkan_release_context(void *user_context, - struct VkInstance_T *instance, - struct VkDevice_T *device, - struct VkQueue_T *queue); - + VkInstance instance, + VkDevice device, + VkQueue queue, + VkDebugUtilsMessengerEXT messenger); // -- // Override the default allocation callbacks (default uses Vulkan runtime implementation) diff --git a/src/runtime/internal/block_allocator.h b/src/runtime/internal/block_allocator.h index db07f677171e..37cacce601e3 100644 --- a/src/runtime/internal/block_allocator.h +++ b/src/runtime/internal/block_allocator.h @@ -24,7 +24,7 @@ namespace Internal { */ class BlockAllocator { public: - // disable copy constructors and assignment + // disable copy constructors and assignment for BlockAllocator(const BlockAllocator &) = delete; BlockAllocator &operator=(const BlockAllocator &) = delete; @@ -468,7 +468,9 @@ BlockAllocator::create_block_entry(void *user_context, const MemoryRequest &requ BlockEntry *block_entry = block_list.append(user_context); if (block_entry == nullptr) { +#ifdef DEBUG_RUNTIME_INTERNAL debug(user_context) << "BlockAllocator: Failed to allocate new block entry\n"; +#endif return nullptr; } @@ -491,7 +493,14 @@ BlockAllocator::create_block_entry(void *user_context, const MemoryRequest &requ block->memory.dedicated = block_request.dedicated; block->reserved = 0; block->allocator = create_region_allocator(user_context, block); - alloc_memory_block(user_context, block); + int error_code = alloc_memory_block(user_context, block); + if (error_code != 0) { +#ifdef DEBUG_RUNTIME_INTERNAL + debug(user_context) << "BlockAllocator: Failed to allocate memory block\n"; +#endif + destroy_block_entry(user_context, block_entry); + return nullptr; + } return block_entry; } @@ -520,9 +529,9 @@ int BlockAllocator::destroy_block_entry(void *user_context, BlockAllocator::Bloc destroy_region_allocator(user_context, block->allocator); block->allocator = nullptr; } - free_memory_block(user_context, block); + int error_code = free_memory_block(user_context, block); block_list.remove(user_context, block_entry); - return 0; + return error_code; } int BlockAllocator::alloc_memory_block(void *user_context, BlockResource *block) { @@ -531,9 +540,9 @@ int BlockAllocator::alloc_memory_block(void *user_context, BlockResource *block) #endif halide_abort_if_false(user_context, allocators.block.allocate != nullptr); MemoryBlock *memory_block = &(block->memory); - allocators.block.allocate(user_context, memory_block); + int error_code = allocators.block.allocate(user_context, memory_block); block->reserved = 0; - return 0; + return error_code; } int BlockAllocator::free_memory_block(void *user_context, BlockResource *block) { @@ -542,11 +551,11 @@ int BlockAllocator::free_memory_block(void *user_context, BlockResource *block) #endif halide_abort_if_false(user_context, allocators.block.deallocate != nullptr); MemoryBlock *memory_block = &(block->memory); - allocators.block.deallocate(user_context, memory_block); + int error_code = allocators.block.deallocate(user_context, memory_block); memory_block->handle = nullptr; block->reserved = 0; block->memory.size = 0; - return 0; + return error_code; } size_t BlockAllocator::constrain_requested_size(size_t size) const { diff --git a/src/runtime/internal/region_allocator.h b/src/runtime/internal/region_allocator.h index 6751dea2fa49..6f39991ff79c 100644 --- a/src/runtime/internal/region_allocator.h +++ b/src/runtime/internal/region_allocator.h @@ -633,8 +633,10 @@ int RegionAllocator::alloc_block_region(void *user_context, 
BlockRegion *block_r << ")"; #endif } - block_region->status = block_region->memory.dedicated ? AllocationStatus::Dedicated : AllocationStatus::InUse; - block->reserved += block_region->memory.size; + if (error_code == 0) { + block_region->status = block_region->memory.dedicated ? AllocationStatus::Dedicated : AllocationStatus::InUse; + block->reserved += block_region->memory.size; + } return error_code; } @@ -649,6 +651,7 @@ int RegionAllocator::free_block_region(void *user_context, BlockRegion *block_re << "usage_count=" << (uint32_t)block_region->usage_count << " " << "block_reserved=" << (uint32_t)block->reserved << ")"; #endif + int error_code = 0; if ((block_region->usage_count == 0) && (block_region->memory.handle != nullptr)) { #ifdef DEBUG_RUNTIME_INTERNAL debug(user_context) << " deallocating region (" @@ -661,12 +664,12 @@ int RegionAllocator::free_block_region(void *user_context, BlockRegion *block_re // NOTE: Deallocate but leave memory size as is, so that coalesce can compute region merging sizes halide_abort_if_false(user_context, allocators.region.deallocate != nullptr); MemoryRegion *memory_region = &(block_region->memory); - allocators.region.deallocate(user_context, memory_region); + error_code = allocators.region.deallocate(user_context, memory_region); block_region->memory.handle = nullptr; } block_region->usage_count = 0; block_region->status = AllocationStatus::Available; - return 0; + return error_code; } int RegionAllocator::release(void *user_context) { diff --git a/src/runtime/mini_vulkan.h b/src/runtime/mini_vulkan.h deleted file mode 100644 index c02f6044a308..000000000000 --- a/src/runtime/mini_vulkan.h +++ /dev/null @@ -1,6076 +0,0 @@ -#ifndef HALIDE_MINI_VULKAN_H -#define HALIDE_MINI_VULKAN_H - -/* -** Copyright (c) 2014-2017 The Khronos Group Inc. -** -** Licensed under the Apache License, Version 2.0 (the "License"); -** you may not use this file except in compliance with the License. -** You may obtain a copy of the License at -** -** http://www.apache.org/licenses/LICENSE-2.0 -** -** Unless required by applicable law or agreed to in writing, software -** distributed under the License is distributed on an "AS IS" BASIS, -** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -** See the License for the specific language governing permissions and -** limitations under the License. -*/ - -// Pickup integer types -#include "runtime_internal.h" - -#ifdef __cplusplus -extern "C" { -#endif // __cplusplus - -/* -*************************************************************************************************** -* Platform-specific directives and type declarations -*************************************************************************************************** -*/ - -/* Platform-specific calling convention macros. - * - * Platforms should define these so that Vulkan clients call Vulkan commands - * with the same calling conventions that the Vulkan implementation expects. - * - * VKAPI_ATTR - Placed before the return type in function declarations. - * Useful for C++11 and GCC/Clang-style function attribute syntax. - * VKAPI_CALL - Placed after the return type in function declarations. - * Useful for MSVC-style calling convention syntax. - * VKAPI_PTR - Placed between the '(' and '*' in function pointer types. 
- * - * Function declaration: VKAPI_ATTR void VKAPI_CALL vkCommand(void); - * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void); - */ -#if defined(_WIN32) -// On Windows, Vulkan commands use the stdcall convention -#define VKAPI_ATTR -#define VKAPI_CALL __stdcall -#define VKAPI_PTR VKAPI_CALL -#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7 -#error "Vulkan isn't supported for the 'armeabi' NDK ABI" -#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE) -// On Android 32-bit ARM targets, Vulkan functions use the "hardfloat" -// calling convention, i.e. float parameters are passed in registers. This -// is true even if the rest of the application passes floats on the stack, -// as it does by default when compiling for the armeabi-v7a NDK ABI. -#define VKAPI_ATTR __attribute__((pcs("aapcs-vfp"))) -#define VKAPI_CALL -#define VKAPI_PTR VKAPI_ATTR -#else -// On other platforms, use the default calling convention -#define VKAPI_ATTR -#define VKAPI_CALL -#define VKAPI_PTR -#endif - -typedef uint32_t VkFlags; -typedef uint32_t VkBool32; -typedef uint64_t VkDeviceSize; -typedef uint32_t VkSampleMask; - -// Provided by VK_VERSION_1_0 -#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU) -#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU) -#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU) -#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ - ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) -#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0) -#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0) -#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0) -#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0) - -#define VK_DEFINE_HANDLE(object) typedef struct object##_T *(object); -#define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object; - -VK_DEFINE_HANDLE(VkInstance) -VK_DEFINE_HANDLE(VkPhysicalDevice) -VK_DEFINE_HANDLE(VkDevice) -VK_DEFINE_HANDLE(VkQueue) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore) -VK_DEFINE_HANDLE(VkCommandBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool) - -#define VK_LOD_CLAMP_NONE 1000.0f -#define VK_REMAINING_MIP_LEVELS (~0U) -#define VK_REMAINING_ARRAY_LAYERS (~0U) -#define VK_WHOLE_SIZE (~0ULL) -#define VK_ATTACHMENT_UNUSED (~0U) -#define VK_TRUE 1 -#define VK_FALSE 0 -#define VK_QUEUE_FAMILY_IGNORED (~0U) -#define VK_SUBPASS_EXTERNAL (~0U) -#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE 256 -#define VK_UUID_SIZE 16 -#define VK_MAX_MEMORY_TYPES 
32 -#define VK_MAX_MEMORY_HEAPS 16 -#define VK_MAX_EXTENSION_NAME_SIZE 256 -#define VK_MAX_DESCRIPTION_SIZE 256 - -typedef enum VkPipelineCacheHeaderVersion { - VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1, - VK_PIPELINE_CACHE_HEADER_VERSION_BEGIN_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_END_RANGE = VK_PIPELINE_CACHE_HEADER_VERSION_ONE, - VK_PIPELINE_CACHE_HEADER_VERSION_RANGE_SIZE = (VK_PIPELINE_CACHE_HEADER_VERSION_ONE - VK_PIPELINE_CACHE_HEADER_VERSION_ONE + 1), // NOLINT: misc-redundant-expression - VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCacheHeaderVersion; - -typedef enum VkResult { - VK_SUCCESS = 0, - VK_NOT_READY = 1, - VK_TIMEOUT = 2, - VK_EVENT_SET = 3, - VK_EVENT_RESET = 4, - VK_INCOMPLETE = 5, - VK_ERROR_OUT_OF_HOST_MEMORY = -1, - VK_ERROR_OUT_OF_DEVICE_MEMORY = -2, - VK_ERROR_INITIALIZATION_FAILED = -3, - VK_ERROR_DEVICE_LOST = -4, - VK_ERROR_MEMORY_MAP_FAILED = -5, - VK_ERROR_LAYER_NOT_PRESENT = -6, - VK_ERROR_EXTENSION_NOT_PRESENT = -7, - VK_ERROR_FEATURE_NOT_PRESENT = -8, - VK_ERROR_INCOMPATIBLE_DRIVER = -9, - VK_ERROR_TOO_MANY_OBJECTS = -10, - VK_ERROR_FORMAT_NOT_SUPPORTED = -11, - VK_ERROR_FRAGMENTED_POOL = -12, - VK_ERROR_SURFACE_LOST_KHR = -1000000000, - VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001, - VK_SUBOPTIMAL_KHR = 1000001003, - VK_ERROR_OUT_OF_DATE_KHR = -1000001004, - VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001, - VK_ERROR_VALIDATION_FAILED_EXT = -1000011001, - VK_ERROR_INVALID_SHADER_NV = -1000012000, - VK_ERROR_OUT_OF_POOL_MEMORY_KHR = -1000069000, - VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = -1000072003, - VK_RESULT_BEGIN_RANGE = VK_ERROR_FRAGMENTED_POOL, - VK_RESULT_END_RANGE = VK_INCOMPLETE, - VK_RESULT_RANGE_SIZE = (VK_INCOMPLETE - VK_ERROR_FRAGMENTED_POOL + 1), // NOLINT: misc-redundant-expression - VK_RESULT_MAX_ENUM = 0x7FFFFFFF -} VkResult; - -typedef enum VkStructureType { - VK_STRUCTURE_TYPE_APPLICATION_INFO = 0, - VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1, - VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2, - VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3, - VK_STRUCTURE_TYPE_SUBMIT_INFO = 4, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5, - VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6, - VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7, - VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8, - VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9, - VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10, - VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11, - VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12, - VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13, - VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14, - VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15, - VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16, - VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17, - VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18, - VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19, - VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20, - VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23, - VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24, - VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26, - VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27, - VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28, - VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29, - VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30, - 
VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32, - VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33, - VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34, - VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35, - VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36, - VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37, - VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38, - VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41, - VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42, - VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44, - VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45, - VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46, - VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47, - VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48, - VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000, - VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001, - VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000, - VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001, - VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000, - VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000, - VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000, - VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000, - VK_STRUCTURE_TYPE_MIR_SURFACE_CREATE_INFO_KHR = 1000007000, - VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000, - VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000, - VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000, - VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000, - VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001, - VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001, - VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002, - VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000, - VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHX = 1000053000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHX = 1000053001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHX = 1000053002, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = 1000059000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = 1000059001, - VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = 1000059002, - VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = 1000059004, - VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = 1000059005, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = 1000059006, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = 1000059007, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = 1000059008, - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHX = 1000060000, - 
VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHX = 1000060001, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHX = 1000060002, - VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHX = 1000060003, - VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHX = 1000060004, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHX = 1000060005, - VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHX = 1000060006, - VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHX = 1000060010, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHX = 1000060007, - VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHX = 1000060008, - VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHX = 1000060009, - VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHX = 1000060011, - VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHX = 1000060012, - VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000, - VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHX = 1000070000, - VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHX = 1000070001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = 1000071000, - VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = 1000071001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = 1000071002, - VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = 1000071003, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = 1000071004, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = 1000072000, - VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = 1000072001, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = 1000072002, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000, - VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001, - VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002, - VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003, - VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000, - VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001, - VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002, - VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = 1000076000, - VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = 1000076001, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = 1000077000, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000, - VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001, - VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003, - VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000, - VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = 1000083000, - VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000, - VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = 1000085000, - VK_STRUCTURE_TYPE_OBJECT_TABLE_CREATE_INFO_NVX = 1000086000, - VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NVX = 1000086001, - VK_STRUCTURE_TYPE_CMD_PROCESS_COMMANDS_INFO_NVX = 1000086002, - VK_STRUCTURE_TYPE_CMD_RESERVE_SPACE_FOR_COMMANDS_INFO_NVX = 1000086003, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_LIMITS_NVX = 1000086004, - VK_STRUCTURE_TYPE_DEVICE_GENERATED_COMMANDS_FEATURES_NVX = 1000086005, - 
VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000, - VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000, - VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001, - VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002, - VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003, - VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000, - VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000, - VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001, - VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000, - VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = 1000112000, - VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = 1000112001, - VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = 1000113000, - VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000, - VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001, - VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002, - VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000, - VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000, - VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001, - VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = 1000120000, - VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000, - VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = 1000127000, - VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = 1000127001, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = 1000130000, - VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = 1000130001, - VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000, - VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001, - VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003, - VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004, - VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146000, - VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146001, - VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = 1000146002, - VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = 1000146003, - VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = 1000146004, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000, - VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001, - VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000, - VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000, - VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000, - VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001, - VK_STRUCTURE_TYPE_BEGIN_RANGE = VK_STRUCTURE_TYPE_APPLICATION_INFO, - VK_STRUCTURE_TYPE_END_RANGE = 
VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO, - VK_STRUCTURE_TYPE_RANGE_SIZE = (VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO - VK_STRUCTURE_TYPE_APPLICATION_INFO + 1), // NOLINT: misc-redundant-expression - VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkStructureType; - -typedef enum VkSystemAllocationScope { - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0, - VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1, - VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2, - VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3, - VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4, - VK_SYSTEM_ALLOCATION_SCOPE_BEGIN_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND, - VK_SYSTEM_ALLOCATION_SCOPE_END_RANGE = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE, - VK_SYSTEM_ALLOCATION_SCOPE_RANGE_SIZE = (VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE - VK_SYSTEM_ALLOCATION_SCOPE_COMMAND + 1), // NOLINT: misc-redundant-expression - VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF -} VkSystemAllocationScope; - -typedef enum VkInternalAllocationType { - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0, - VK_INTERNAL_ALLOCATION_TYPE_BEGIN_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_END_RANGE = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE, - VK_INTERNAL_ALLOCATION_TYPE_RANGE_SIZE = (VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE - VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE + 1), // NOLINT: misc-redundant-expression - VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkInternalAllocationType; - -typedef enum VkFormat { - VK_FORMAT_UNDEFINED = 0, - VK_FORMAT_R4G4_UNORM_PACK8 = 1, - VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2, - VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3, - VK_FORMAT_R5G6B5_UNORM_PACK16 = 4, - VK_FORMAT_B5G6R5_UNORM_PACK16 = 5, - VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6, - VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7, - VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8, - VK_FORMAT_R8_UNORM = 9, - VK_FORMAT_R8_SNORM = 10, - VK_FORMAT_R8_USCALED = 11, - VK_FORMAT_R8_SSCALED = 12, - VK_FORMAT_R8_UINT = 13, - VK_FORMAT_R8_SINT = 14, - VK_FORMAT_R8_SRGB = 15, - VK_FORMAT_R8G8_UNORM = 16, - VK_FORMAT_R8G8_SNORM = 17, - VK_FORMAT_R8G8_USCALED = 18, - VK_FORMAT_R8G8_SSCALED = 19, - VK_FORMAT_R8G8_UINT = 20, - VK_FORMAT_R8G8_SINT = 21, - VK_FORMAT_R8G8_SRGB = 22, - VK_FORMAT_R8G8B8_UNORM = 23, - VK_FORMAT_R8G8B8_SNORM = 24, - VK_FORMAT_R8G8B8_USCALED = 25, - VK_FORMAT_R8G8B8_SSCALED = 26, - VK_FORMAT_R8G8B8_UINT = 27, - VK_FORMAT_R8G8B8_SINT = 28, - VK_FORMAT_R8G8B8_SRGB = 29, - VK_FORMAT_B8G8R8_UNORM = 30, - VK_FORMAT_B8G8R8_SNORM = 31, - VK_FORMAT_B8G8R8_USCALED = 32, - VK_FORMAT_B8G8R8_SSCALED = 33, - VK_FORMAT_B8G8R8_UINT = 34, - VK_FORMAT_B8G8R8_SINT = 35, - VK_FORMAT_B8G8R8_SRGB = 36, - VK_FORMAT_R8G8B8A8_UNORM = 37, - VK_FORMAT_R8G8B8A8_SNORM = 38, - VK_FORMAT_R8G8B8A8_USCALED = 39, - VK_FORMAT_R8G8B8A8_SSCALED = 40, - VK_FORMAT_R8G8B8A8_UINT = 41, - VK_FORMAT_R8G8B8A8_SINT = 42, - VK_FORMAT_R8G8B8A8_SRGB = 43, - VK_FORMAT_B8G8R8A8_UNORM = 44, - VK_FORMAT_B8G8R8A8_SNORM = 45, - VK_FORMAT_B8G8R8A8_USCALED = 46, - VK_FORMAT_B8G8R8A8_SSCALED = 47, - VK_FORMAT_B8G8R8A8_UINT = 48, - VK_FORMAT_B8G8R8A8_SINT = 49, - VK_FORMAT_B8G8R8A8_SRGB = 50, - VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51, - VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52, - VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53, - VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54, - VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55, - VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56, - VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57, - VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58, - VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59, - VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60, - VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61, - VK_FORMAT_A2R10G10B10_UINT_PACK32 
= 62, - VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63, - VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64, - VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65, - VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66, - VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67, - VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68, - VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69, - VK_FORMAT_R16_UNORM = 70, - VK_FORMAT_R16_SNORM = 71, - VK_FORMAT_R16_USCALED = 72, - VK_FORMAT_R16_SSCALED = 73, - VK_FORMAT_R16_UINT = 74, - VK_FORMAT_R16_SINT = 75, - VK_FORMAT_R16_SFLOAT = 76, - VK_FORMAT_R16G16_UNORM = 77, - VK_FORMAT_R16G16_SNORM = 78, - VK_FORMAT_R16G16_USCALED = 79, - VK_FORMAT_R16G16_SSCALED = 80, - VK_FORMAT_R16G16_UINT = 81, - VK_FORMAT_R16G16_SINT = 82, - VK_FORMAT_R16G16_SFLOAT = 83, - VK_FORMAT_R16G16B16_UNORM = 84, - VK_FORMAT_R16G16B16_SNORM = 85, - VK_FORMAT_R16G16B16_USCALED = 86, - VK_FORMAT_R16G16B16_SSCALED = 87, - VK_FORMAT_R16G16B16_UINT = 88, - VK_FORMAT_R16G16B16_SINT = 89, - VK_FORMAT_R16G16B16_SFLOAT = 90, - VK_FORMAT_R16G16B16A16_UNORM = 91, - VK_FORMAT_R16G16B16A16_SNORM = 92, - VK_FORMAT_R16G16B16A16_USCALED = 93, - VK_FORMAT_R16G16B16A16_SSCALED = 94, - VK_FORMAT_R16G16B16A16_UINT = 95, - VK_FORMAT_R16G16B16A16_SINT = 96, - VK_FORMAT_R16G16B16A16_SFLOAT = 97, - VK_FORMAT_R32_UINT = 98, - VK_FORMAT_R32_SINT = 99, - VK_FORMAT_R32_SFLOAT = 100, - VK_FORMAT_R32G32_UINT = 101, - VK_FORMAT_R32G32_SINT = 102, - VK_FORMAT_R32G32_SFLOAT = 103, - VK_FORMAT_R32G32B32_UINT = 104, - VK_FORMAT_R32G32B32_SINT = 105, - VK_FORMAT_R32G32B32_SFLOAT = 106, - VK_FORMAT_R32G32B32A32_UINT = 107, - VK_FORMAT_R32G32B32A32_SINT = 108, - VK_FORMAT_R32G32B32A32_SFLOAT = 109, - VK_FORMAT_R64_UINT = 110, - VK_FORMAT_R64_SINT = 111, - VK_FORMAT_R64_SFLOAT = 112, - VK_FORMAT_R64G64_UINT = 113, - VK_FORMAT_R64G64_SINT = 114, - VK_FORMAT_R64G64_SFLOAT = 115, - VK_FORMAT_R64G64B64_UINT = 116, - VK_FORMAT_R64G64B64_SINT = 117, - VK_FORMAT_R64G64B64_SFLOAT = 118, - VK_FORMAT_R64G64B64A64_UINT = 119, - VK_FORMAT_R64G64B64A64_SINT = 120, - VK_FORMAT_R64G64B64A64_SFLOAT = 121, - VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122, - VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123, - VK_FORMAT_D16_UNORM = 124, - VK_FORMAT_X8_D24_UNORM_PACK32 = 125, - VK_FORMAT_D32_SFLOAT = 126, - VK_FORMAT_S8_UINT = 127, - VK_FORMAT_D16_UNORM_S8_UINT = 128, - VK_FORMAT_D24_UNORM_S8_UINT = 129, - VK_FORMAT_D32_SFLOAT_S8_UINT = 130, - VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131, - VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132, - VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133, - VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134, - VK_FORMAT_BC2_UNORM_BLOCK = 135, - VK_FORMAT_BC2_SRGB_BLOCK = 136, - VK_FORMAT_BC3_UNORM_BLOCK = 137, - VK_FORMAT_BC3_SRGB_BLOCK = 138, - VK_FORMAT_BC4_UNORM_BLOCK = 139, - VK_FORMAT_BC4_SNORM_BLOCK = 140, - VK_FORMAT_BC5_UNORM_BLOCK = 141, - VK_FORMAT_BC5_SNORM_BLOCK = 142, - VK_FORMAT_BC6H_UFLOAT_BLOCK = 143, - VK_FORMAT_BC6H_SFLOAT_BLOCK = 144, - VK_FORMAT_BC7_UNORM_BLOCK = 145, - VK_FORMAT_BC7_SRGB_BLOCK = 146, - VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147, - VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148, - VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149, - VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150, - VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151, - VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152, - VK_FORMAT_EAC_R11_UNORM_BLOCK = 153, - VK_FORMAT_EAC_R11_SNORM_BLOCK = 154, - VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155, - VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156, - VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157, - VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158, - VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159, - VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160, - VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161, - 
VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162, - VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163, - VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164, - VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165, - VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166, - VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167, - VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168, - VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169, - VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170, - VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171, - VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172, - VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173, - VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174, - VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175, - VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176, - VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177, - VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178, - VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179, - VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180, - VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181, - VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182, - VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183, - VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184, - VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000, - VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001, - VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002, - VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003, - VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004, - VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005, - VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006, - VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007, - VK_FORMAT_BEGIN_RANGE = VK_FORMAT_UNDEFINED, - VK_FORMAT_END_RANGE = VK_FORMAT_ASTC_12x12_SRGB_BLOCK, - VK_FORMAT_RANGE_SIZE = (VK_FORMAT_ASTC_12x12_SRGB_BLOCK - VK_FORMAT_UNDEFINED + 1), // NOLINT: misc-redundant-expression - VK_FORMAT_MAX_ENUM = 0x7FFFFFFF -} VkFormat; - -typedef enum VkImageType { - VK_IMAGE_TYPE_1D = 0, - VK_IMAGE_TYPE_2D = 1, - VK_IMAGE_TYPE_3D = 2, - VK_IMAGE_TYPE_BEGIN_RANGE = VK_IMAGE_TYPE_1D, - VK_IMAGE_TYPE_END_RANGE = VK_IMAGE_TYPE_3D, - VK_IMAGE_TYPE_RANGE_SIZE = (VK_IMAGE_TYPE_3D - VK_IMAGE_TYPE_1D + 1), // NOLINT: misc-redundant-expression - VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageType; - -typedef enum VkImageTiling { - VK_IMAGE_TILING_OPTIMAL = 0, - VK_IMAGE_TILING_LINEAR = 1, - VK_IMAGE_TILING_BEGIN_RANGE = VK_IMAGE_TILING_OPTIMAL, - VK_IMAGE_TILING_END_RANGE = VK_IMAGE_TILING_LINEAR, - VK_IMAGE_TILING_RANGE_SIZE = (VK_IMAGE_TILING_LINEAR - VK_IMAGE_TILING_OPTIMAL + 1), // NOLINT: misc-redundant-expression - VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF -} VkImageTiling; - -typedef enum VkPhysicalDeviceType { - VK_PHYSICAL_DEVICE_TYPE_OTHER = 0, - VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1, - VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2, - VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3, - VK_PHYSICAL_DEVICE_TYPE_CPU = 4, - VK_PHYSICAL_DEVICE_TYPE_BEGIN_RANGE = VK_PHYSICAL_DEVICE_TYPE_OTHER, - VK_PHYSICAL_DEVICE_TYPE_END_RANGE = VK_PHYSICAL_DEVICE_TYPE_CPU, - VK_PHYSICAL_DEVICE_TYPE_RANGE_SIZE = (VK_PHYSICAL_DEVICE_TYPE_CPU - VK_PHYSICAL_DEVICE_TYPE_OTHER + 1), // NOLINT: misc-redundant-expression - VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkPhysicalDeviceType; - -typedef enum VkQueryType { - VK_QUERY_TYPE_OCCLUSION = 0, - VK_QUERY_TYPE_PIPELINE_STATISTICS = 1, - VK_QUERY_TYPE_TIMESTAMP = 2, - VK_QUERY_TYPE_BEGIN_RANGE = VK_QUERY_TYPE_OCCLUSION, - VK_QUERY_TYPE_END_RANGE = VK_QUERY_TYPE_TIMESTAMP, - VK_QUERY_TYPE_RANGE_SIZE = (VK_QUERY_TYPE_TIMESTAMP - VK_QUERY_TYPE_OCCLUSION + 1), // NOLINT: misc-redundant-expression - VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkQueryType; - -typedef enum VkSharingMode { - VK_SHARING_MODE_EXCLUSIVE = 0, - VK_SHARING_MODE_CONCURRENT = 1, - VK_SHARING_MODE_BEGIN_RANGE = VK_SHARING_MODE_EXCLUSIVE, - 
VK_SHARING_MODE_END_RANGE = VK_SHARING_MODE_CONCURRENT, - VK_SHARING_MODE_RANGE_SIZE = (VK_SHARING_MODE_CONCURRENT - VK_SHARING_MODE_EXCLUSIVE + 1), // NOLINT: misc-redundant-expression - VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSharingMode; - -typedef enum VkImageLayout { - VK_IMAGE_LAYOUT_UNDEFINED = 0, - VK_IMAGE_LAYOUT_GENERAL = 1, - VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3, - VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4, - VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5, - VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6, - VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7, - VK_IMAGE_LAYOUT_PREINITIALIZED = 8, - VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002, - VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000, - VK_IMAGE_LAYOUT_BEGIN_RANGE = VK_IMAGE_LAYOUT_UNDEFINED, - VK_IMAGE_LAYOUT_END_RANGE = VK_IMAGE_LAYOUT_PREINITIALIZED, - VK_IMAGE_LAYOUT_RANGE_SIZE = (VK_IMAGE_LAYOUT_PREINITIALIZED - VK_IMAGE_LAYOUT_UNDEFINED + 1), // NOLINT: misc-redundant-expression - VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF -} VkImageLayout; - -typedef enum VkImageViewType { - VK_IMAGE_VIEW_TYPE_1D = 0, - VK_IMAGE_VIEW_TYPE_2D = 1, - VK_IMAGE_VIEW_TYPE_3D = 2, - VK_IMAGE_VIEW_TYPE_CUBE = 3, - VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4, - VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5, - VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6, - VK_IMAGE_VIEW_TYPE_BEGIN_RANGE = VK_IMAGE_VIEW_TYPE_1D, - VK_IMAGE_VIEW_TYPE_END_RANGE = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY, - VK_IMAGE_VIEW_TYPE_RANGE_SIZE = (VK_IMAGE_VIEW_TYPE_CUBE_ARRAY - VK_IMAGE_VIEW_TYPE_1D + 1), // NOLINT: misc-redundant-expression - VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkImageViewType; - -typedef enum VkComponentSwizzle { - VK_COMPONENT_SWIZZLE_IDENTITY = 0, - VK_COMPONENT_SWIZZLE_ZERO = 1, - VK_COMPONENT_SWIZZLE_ONE = 2, - VK_COMPONENT_SWIZZLE_R = 3, - VK_COMPONENT_SWIZZLE_G = 4, - VK_COMPONENT_SWIZZLE_B = 5, - VK_COMPONENT_SWIZZLE_A = 6, - VK_COMPONENT_SWIZZLE_BEGIN_RANGE = VK_COMPONENT_SWIZZLE_IDENTITY, - VK_COMPONENT_SWIZZLE_END_RANGE = VK_COMPONENT_SWIZZLE_A, - VK_COMPONENT_SWIZZLE_RANGE_SIZE = (VK_COMPONENT_SWIZZLE_A - VK_COMPONENT_SWIZZLE_IDENTITY + 1), // NOLINT: misc-redundant-expression - VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF -} VkComponentSwizzle; - -typedef enum VkVertexInputRate { - VK_VERTEX_INPUT_RATE_VERTEX = 0, - VK_VERTEX_INPUT_RATE_INSTANCE = 1, - VK_VERTEX_INPUT_RATE_BEGIN_RANGE = VK_VERTEX_INPUT_RATE_VERTEX, - VK_VERTEX_INPUT_RATE_END_RANGE = VK_VERTEX_INPUT_RATE_INSTANCE, - VK_VERTEX_INPUT_RATE_RANGE_SIZE = (VK_VERTEX_INPUT_RATE_INSTANCE - VK_VERTEX_INPUT_RATE_VERTEX + 1), // NOLINT: misc-redundant-expression - VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF -} VkVertexInputRate; - -typedef enum VkPrimitiveTopology { - VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5, - VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6, - VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8, - VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9, - VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10, - VK_PRIMITIVE_TOPOLOGY_BEGIN_RANGE = VK_PRIMITIVE_TOPOLOGY_POINT_LIST, - VK_PRIMITIVE_TOPOLOGY_END_RANGE = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST, - VK_PRIMITIVE_TOPOLOGY_RANGE_SIZE = (VK_PRIMITIVE_TOPOLOGY_PATCH_LIST - VK_PRIMITIVE_TOPOLOGY_POINT_LIST + 1), // NOLINT: misc-redundant-expression - 
VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF -} VkPrimitiveTopology; - -typedef enum VkPolygonMode { - VK_POLYGON_MODE_FILL = 0, - VK_POLYGON_MODE_LINE = 1, - VK_POLYGON_MODE_POINT = 2, - VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000, - VK_POLYGON_MODE_BEGIN_RANGE = VK_POLYGON_MODE_FILL, - VK_POLYGON_MODE_END_RANGE = VK_POLYGON_MODE_POINT, - VK_POLYGON_MODE_RANGE_SIZE = (VK_POLYGON_MODE_POINT - VK_POLYGON_MODE_FILL + 1), // NOLINT: misc-redundant-expression - VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF -} VkPolygonMode; - -typedef enum VkFrontFace { - VK_FRONT_FACE_COUNTER_CLOCKWISE = 0, - VK_FRONT_FACE_CLOCKWISE = 1, - VK_FRONT_FACE_BEGIN_RANGE = VK_FRONT_FACE_COUNTER_CLOCKWISE, - VK_FRONT_FACE_END_RANGE = VK_FRONT_FACE_CLOCKWISE, - VK_FRONT_FACE_RANGE_SIZE = (VK_FRONT_FACE_CLOCKWISE - VK_FRONT_FACE_COUNTER_CLOCKWISE + 1), // NOLINT: misc-redundant-expression - VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF -} VkFrontFace; - -typedef enum VkCompareOp { - VK_COMPARE_OP_NEVER = 0, - VK_COMPARE_OP_LESS = 1, - VK_COMPARE_OP_EQUAL = 2, - VK_COMPARE_OP_LESS_OR_EQUAL = 3, - VK_COMPARE_OP_GREATER = 4, - VK_COMPARE_OP_NOT_EQUAL = 5, - VK_COMPARE_OP_GREATER_OR_EQUAL = 6, - VK_COMPARE_OP_ALWAYS = 7, - VK_COMPARE_OP_BEGIN_RANGE = VK_COMPARE_OP_NEVER, - VK_COMPARE_OP_END_RANGE = VK_COMPARE_OP_ALWAYS, - VK_COMPARE_OP_RANGE_SIZE = (VK_COMPARE_OP_ALWAYS - VK_COMPARE_OP_NEVER + 1), // NOLINT: misc-redundant-expression - VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF -} VkCompareOp; - -typedef enum VkStencilOp { - VK_STENCIL_OP_KEEP = 0, - VK_STENCIL_OP_ZERO = 1, - VK_STENCIL_OP_REPLACE = 2, - VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3, - VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4, - VK_STENCIL_OP_INVERT = 5, - VK_STENCIL_OP_INCREMENT_AND_WRAP = 6, - VK_STENCIL_OP_DECREMENT_AND_WRAP = 7, - VK_STENCIL_OP_BEGIN_RANGE = VK_STENCIL_OP_KEEP, - VK_STENCIL_OP_END_RANGE = VK_STENCIL_OP_DECREMENT_AND_WRAP, - VK_STENCIL_OP_RANGE_SIZE = (VK_STENCIL_OP_DECREMENT_AND_WRAP - VK_STENCIL_OP_KEEP + 1), // NOLINT: misc-redundant-expression - VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF -} VkStencilOp; - -typedef enum VkLogicOp { - VK_LOGIC_OP_CLEAR = 0, - VK_LOGIC_OP_AND = 1, - VK_LOGIC_OP_AND_REVERSE = 2, - VK_LOGIC_OP_COPY = 3, - VK_LOGIC_OP_AND_INVERTED = 4, - VK_LOGIC_OP_NO_OP = 5, - VK_LOGIC_OP_XOR = 6, - VK_LOGIC_OP_OR = 7, - VK_LOGIC_OP_NOR = 8, - VK_LOGIC_OP_EQUIVALENT = 9, - VK_LOGIC_OP_INVERT = 10, - VK_LOGIC_OP_OR_REVERSE = 11, - VK_LOGIC_OP_COPY_INVERTED = 12, - VK_LOGIC_OP_OR_INVERTED = 13, - VK_LOGIC_OP_NAND = 14, - VK_LOGIC_OP_SET = 15, - VK_LOGIC_OP_BEGIN_RANGE = VK_LOGIC_OP_CLEAR, - VK_LOGIC_OP_END_RANGE = VK_LOGIC_OP_SET, - VK_LOGIC_OP_RANGE_SIZE = (VK_LOGIC_OP_SET - VK_LOGIC_OP_CLEAR + 1), // NOLINT: misc-redundant-expression - VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF -} VkLogicOp; - -typedef enum VkBlendFactor { - VK_BLEND_FACTOR_ZERO = 0, - VK_BLEND_FACTOR_ONE = 1, - VK_BLEND_FACTOR_SRC_COLOR = 2, - VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3, - VK_BLEND_FACTOR_DST_COLOR = 4, - VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5, - VK_BLEND_FACTOR_SRC_ALPHA = 6, - VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7, - VK_BLEND_FACTOR_DST_ALPHA = 8, - VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9, - VK_BLEND_FACTOR_CONSTANT_COLOR = 10, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11, - VK_BLEND_FACTOR_CONSTANT_ALPHA = 12, - VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13, - VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14, - VK_BLEND_FACTOR_SRC1_COLOR = 15, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16, - VK_BLEND_FACTOR_SRC1_ALPHA = 17, - VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18, - 
VK_BLEND_FACTOR_BEGIN_RANGE = VK_BLEND_FACTOR_ZERO, - VK_BLEND_FACTOR_END_RANGE = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA, - VK_BLEND_FACTOR_RANGE_SIZE = (VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA - VK_BLEND_FACTOR_ZERO + 1), // NOLINT: misc-redundant-expression - VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF -} VkBlendFactor; - -typedef enum VkBlendOp { - VK_BLEND_OP_ADD = 0, - VK_BLEND_OP_SUBTRACT = 1, - VK_BLEND_OP_REVERSE_SUBTRACT = 2, - VK_BLEND_OP_MIN = 3, - VK_BLEND_OP_MAX = 4, - VK_BLEND_OP_ZERO_EXT = 1000148000, - VK_BLEND_OP_SRC_EXT = 1000148001, - VK_BLEND_OP_DST_EXT = 1000148002, - VK_BLEND_OP_SRC_OVER_EXT = 1000148003, - VK_BLEND_OP_DST_OVER_EXT = 1000148004, - VK_BLEND_OP_SRC_IN_EXT = 1000148005, - VK_BLEND_OP_DST_IN_EXT = 1000148006, - VK_BLEND_OP_SRC_OUT_EXT = 1000148007, - VK_BLEND_OP_DST_OUT_EXT = 1000148008, - VK_BLEND_OP_SRC_ATOP_EXT = 1000148009, - VK_BLEND_OP_DST_ATOP_EXT = 1000148010, - VK_BLEND_OP_XOR_EXT = 1000148011, - VK_BLEND_OP_MULTIPLY_EXT = 1000148012, - VK_BLEND_OP_SCREEN_EXT = 1000148013, - VK_BLEND_OP_OVERLAY_EXT = 1000148014, - VK_BLEND_OP_DARKEN_EXT = 1000148015, - VK_BLEND_OP_LIGHTEN_EXT = 1000148016, - VK_BLEND_OP_COLORDODGE_EXT = 1000148017, - VK_BLEND_OP_COLORBURN_EXT = 1000148018, - VK_BLEND_OP_HARDLIGHT_EXT = 1000148019, - VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020, - VK_BLEND_OP_DIFFERENCE_EXT = 1000148021, - VK_BLEND_OP_EXCLUSION_EXT = 1000148022, - VK_BLEND_OP_INVERT_EXT = 1000148023, - VK_BLEND_OP_INVERT_RGB_EXT = 1000148024, - VK_BLEND_OP_LINEARDODGE_EXT = 1000148025, - VK_BLEND_OP_LINEARBURN_EXT = 1000148026, - VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027, - VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028, - VK_BLEND_OP_PINLIGHT_EXT = 1000148029, - VK_BLEND_OP_HARDMIX_EXT = 1000148030, - VK_BLEND_OP_HSL_HUE_EXT = 1000148031, - VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032, - VK_BLEND_OP_HSL_COLOR_EXT = 1000148033, - VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034, - VK_BLEND_OP_PLUS_EXT = 1000148035, - VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036, - VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037, - VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038, - VK_BLEND_OP_MINUS_EXT = 1000148039, - VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040, - VK_BLEND_OP_CONTRAST_EXT = 1000148041, - VK_BLEND_OP_INVERT_OVG_EXT = 1000148042, - VK_BLEND_OP_RED_EXT = 1000148043, - VK_BLEND_OP_GREEN_EXT = 1000148044, - VK_BLEND_OP_BLUE_EXT = 1000148045, - VK_BLEND_OP_BEGIN_RANGE = VK_BLEND_OP_ADD, - VK_BLEND_OP_END_RANGE = VK_BLEND_OP_MAX, - VK_BLEND_OP_RANGE_SIZE = (VK_BLEND_OP_MAX - VK_BLEND_OP_ADD + 1), // NOLINT: misc-redundant-expression - VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF -} VkBlendOp; - -typedef enum VkDynamicState { - VK_DYNAMIC_STATE_VIEWPORT = 0, - VK_DYNAMIC_STATE_SCISSOR = 1, - VK_DYNAMIC_STATE_LINE_WIDTH = 2, - VK_DYNAMIC_STATE_DEPTH_BIAS = 3, - VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4, - VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5, - VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6, - VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7, - VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8, - VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000, - VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000, - VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000, - VK_DYNAMIC_STATE_BEGIN_RANGE = VK_DYNAMIC_STATE_VIEWPORT, - VK_DYNAMIC_STATE_END_RANGE = VK_DYNAMIC_STATE_STENCIL_REFERENCE, - VK_DYNAMIC_STATE_RANGE_SIZE = (VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1), // NOLINT: misc-redundant-expression - VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF -} VkDynamicState; - -typedef enum VkFilter { - VK_FILTER_NEAREST = 0, - VK_FILTER_LINEAR = 1, - 
VK_FILTER_CUBIC_IMG = 1000015000, - VK_FILTER_BEGIN_RANGE = VK_FILTER_NEAREST, - VK_FILTER_END_RANGE = VK_FILTER_LINEAR, - VK_FILTER_RANGE_SIZE = (VK_FILTER_LINEAR - VK_FILTER_NEAREST + 1), // NOLINT: misc-redundant-expression - VK_FILTER_MAX_ENUM = 0x7FFFFFFF -} VkFilter; - -typedef enum VkSamplerMipmapMode { - VK_SAMPLER_MIPMAP_MODE_NEAREST = 0, - VK_SAMPLER_MIPMAP_MODE_LINEAR = 1, - VK_SAMPLER_MIPMAP_MODE_BEGIN_RANGE = VK_SAMPLER_MIPMAP_MODE_NEAREST, - VK_SAMPLER_MIPMAP_MODE_END_RANGE = VK_SAMPLER_MIPMAP_MODE_LINEAR, - VK_SAMPLER_MIPMAP_MODE_RANGE_SIZE = (VK_SAMPLER_MIPMAP_MODE_LINEAR - VK_SAMPLER_MIPMAP_MODE_NEAREST + 1), // NOLINT: misc-redundant-expression - VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerMipmapMode; - -typedef enum VkSamplerAddressMode { - VK_SAMPLER_ADDRESS_MODE_REPEAT = 0, - VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2, - VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3, - VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4, - VK_SAMPLER_ADDRESS_MODE_BEGIN_RANGE = VK_SAMPLER_ADDRESS_MODE_REPEAT, - VK_SAMPLER_ADDRESS_MODE_END_RANGE = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER, - VK_SAMPLER_ADDRESS_MODE_RANGE_SIZE = (VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER - VK_SAMPLER_ADDRESS_MODE_REPEAT + 1), // NOLINT: misc-redundant-expression - VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF -} VkSamplerAddressMode; - -typedef enum VkBorderColor { - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0, - VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1, - VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2, - VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3, - VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4, - VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5, - VK_BORDER_COLOR_BEGIN_RANGE = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, - VK_BORDER_COLOR_END_RANGE = VK_BORDER_COLOR_INT_OPAQUE_WHITE, - VK_BORDER_COLOR_RANGE_SIZE = (VK_BORDER_COLOR_INT_OPAQUE_WHITE - VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK + 1), // NOLINT: misc-redundant-expression - VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF -} VkBorderColor; - -typedef enum VkDescriptorType { - VK_DESCRIPTOR_TYPE_SAMPLER = 0, - VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1, - VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2, - VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3, - VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4, - VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7, - VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8, - VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9, - VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10, - VK_DESCRIPTOR_TYPE_BEGIN_RANGE = VK_DESCRIPTOR_TYPE_SAMPLER, - VK_DESCRIPTOR_TYPE_END_RANGE = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT, - VK_DESCRIPTOR_TYPE_RANGE_SIZE = (VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT - VK_DESCRIPTOR_TYPE_SAMPLER + 1), // NOLINT: misc-redundant-expression - VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorType; - -typedef enum VkAttachmentLoadOp { - VK_ATTACHMENT_LOAD_OP_LOAD = 0, - VK_ATTACHMENT_LOAD_OP_CLEAR = 1, - VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2, - VK_ATTACHMENT_LOAD_OP_BEGIN_RANGE = VK_ATTACHMENT_LOAD_OP_LOAD, - VK_ATTACHMENT_LOAD_OP_END_RANGE = VK_ATTACHMENT_LOAD_OP_DONT_CARE, - VK_ATTACHMENT_LOAD_OP_RANGE_SIZE = (VK_ATTACHMENT_LOAD_OP_DONT_CARE - VK_ATTACHMENT_LOAD_OP_LOAD + 1), // NOLINT: misc-redundant-expression - VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentLoadOp; - -typedef enum VkAttachmentStoreOp { - VK_ATTACHMENT_STORE_OP_STORE = 0, - VK_ATTACHMENT_STORE_OP_DONT_CARE = 1, - VK_ATTACHMENT_STORE_OP_BEGIN_RANGE = VK_ATTACHMENT_STORE_OP_STORE, - 
VK_ATTACHMENT_STORE_OP_END_RANGE = VK_ATTACHMENT_STORE_OP_DONT_CARE, - VK_ATTACHMENT_STORE_OP_RANGE_SIZE = (VK_ATTACHMENT_STORE_OP_DONT_CARE - VK_ATTACHMENT_STORE_OP_STORE + 1), // NOLINT: misc-redundant-expression - VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentStoreOp; - -typedef enum VkPipelineBindPoint { - VK_PIPELINE_BIND_POINT_GRAPHICS = 0, - VK_PIPELINE_BIND_POINT_COMPUTE = 1, - VK_PIPELINE_BIND_POINT_BEGIN_RANGE = VK_PIPELINE_BIND_POINT_GRAPHICS, - VK_PIPELINE_BIND_POINT_END_RANGE = VK_PIPELINE_BIND_POINT_COMPUTE, - VK_PIPELINE_BIND_POINT_RANGE_SIZE = (VK_PIPELINE_BIND_POINT_COMPUTE - VK_PIPELINE_BIND_POINT_GRAPHICS + 1), // NOLINT: misc-redundant-expression - VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF -} VkPipelineBindPoint; - -typedef enum VkCommandBufferLevel { - VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0, - VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1, - VK_COMMAND_BUFFER_LEVEL_BEGIN_RANGE = VK_COMMAND_BUFFER_LEVEL_PRIMARY, - VK_COMMAND_BUFFER_LEVEL_END_RANGE = VK_COMMAND_BUFFER_LEVEL_SECONDARY, - VK_COMMAND_BUFFER_LEVEL_RANGE_SIZE = (VK_COMMAND_BUFFER_LEVEL_SECONDARY - VK_COMMAND_BUFFER_LEVEL_PRIMARY + 1), // NOLINT: misc-redundant-expression - VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferLevel; - -typedef enum VkIndexType { - VK_INDEX_TYPE_UINT16 = 0, - VK_INDEX_TYPE_UINT32 = 1, - VK_INDEX_TYPE_BEGIN_RANGE = VK_INDEX_TYPE_UINT16, - VK_INDEX_TYPE_END_RANGE = VK_INDEX_TYPE_UINT32, - VK_INDEX_TYPE_RANGE_SIZE = (VK_INDEX_TYPE_UINT32 - VK_INDEX_TYPE_UINT16 + 1), // NOLINT: misc-redundant-expression - VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkIndexType; - -typedef enum VkSubpassContents { - VK_SUBPASS_CONTENTS_INLINE = 0, - VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1, - VK_SUBPASS_CONTENTS_BEGIN_RANGE = VK_SUBPASS_CONTENTS_INLINE, - VK_SUBPASS_CONTENTS_END_RANGE = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS, - VK_SUBPASS_CONTENTS_RANGE_SIZE = (VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS - VK_SUBPASS_CONTENTS_INLINE + 1), // NOLINT: misc-redundant-expression - VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassContents; - -typedef enum VkObjectType { - VK_OBJECT_TYPE_UNKNOWN = 0, - VK_OBJECT_TYPE_INSTANCE = 1, - VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2, - VK_OBJECT_TYPE_DEVICE = 3, - VK_OBJECT_TYPE_QUEUE = 4, - VK_OBJECT_TYPE_SEMAPHORE = 5, - VK_OBJECT_TYPE_COMMAND_BUFFER = 6, - VK_OBJECT_TYPE_FENCE = 7, - VK_OBJECT_TYPE_DEVICE_MEMORY = 8, - VK_OBJECT_TYPE_BUFFER = 9, - VK_OBJECT_TYPE_IMAGE = 10, - VK_OBJECT_TYPE_EVENT = 11, - VK_OBJECT_TYPE_QUERY_POOL = 12, - VK_OBJECT_TYPE_BUFFER_VIEW = 13, - VK_OBJECT_TYPE_IMAGE_VIEW = 14, - VK_OBJECT_TYPE_SHADER_MODULE = 15, - VK_OBJECT_TYPE_PIPELINE_CACHE = 16, - VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17, - VK_OBJECT_TYPE_RENDER_PASS = 18, - VK_OBJECT_TYPE_PIPELINE = 19, - VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20, - VK_OBJECT_TYPE_SAMPLER = 21, - VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22, - VK_OBJECT_TYPE_DESCRIPTOR_SET = 23, - VK_OBJECT_TYPE_FRAMEBUFFER = 24, - VK_OBJECT_TYPE_COMMAND_POOL = 25, - VK_OBJECT_TYPE_SURFACE_KHR = 1000000000, - VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000, - VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000, - VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001, - VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000, - VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = 1000085000, - VK_OBJECT_TYPE_OBJECT_TABLE_NVX = 1000086000, - VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX = 1000086001, - VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000, - VK_OBJECT_TYPE_BEGIN_RANGE = VK_OBJECT_TYPE_UNKNOWN, - VK_OBJECT_TYPE_END_RANGE = 
VK_OBJECT_TYPE_COMMAND_POOL, - VK_OBJECT_TYPE_RANGE_SIZE = (VK_OBJECT_TYPE_COMMAND_POOL - VK_OBJECT_TYPE_UNKNOWN + 1), // NOLINT: misc-redundant-expression - VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF -} VkObjectType; - -typedef enum VkInstanceCreateFlagBits { - VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001, - VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkInstanceCreateFlagBits; -typedef VkFlags VkInstanceCreateFlags; - -typedef enum VkFormatFeatureFlagBits { - VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001, - VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002, - VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004, - VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010, - VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020, - VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080, - VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100, - VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200, - VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400, - VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = 0x00002000, - VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = 0x00004000, - VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = 0x00008000, - VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = 0x00010000, - VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFormatFeatureFlagBits; -typedef VkFlags VkFormatFeatureFlags; - -typedef enum VkImageUsageFlagBits { - VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004, - VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008, - VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010, - VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020, - VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040, - VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080, - VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageUsageFlagBits; -typedef VkFlags VkImageUsageFlags; - -typedef enum VkImageCreateFlagBits { - VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008, - VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010, - VK_IMAGE_CREATE_BIND_SFR_BIT_KHX = 0x00000040, - VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = 0x00000020, - VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000, - VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageCreateFlagBits; -typedef VkFlags VkImageCreateFlags; - -typedef enum VkSampleCountFlagBits { - VK_SAMPLE_COUNT_1_BIT = 0x00000001, - VK_SAMPLE_COUNT_2_BIT = 0x00000002, - VK_SAMPLE_COUNT_4_BIT = 0x00000004, - VK_SAMPLE_COUNT_8_BIT = 0x00000008, - VK_SAMPLE_COUNT_16_BIT = 0x00000010, - VK_SAMPLE_COUNT_32_BIT = 0x00000020, - VK_SAMPLE_COUNT_64_BIT = 0x00000040, - VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSampleCountFlagBits; -typedef VkFlags VkSampleCountFlags; - -typedef enum VkQueueFlagBits { - VK_QUEUE_GRAPHICS_BIT = 0x00000001, - VK_QUEUE_COMPUTE_BIT = 0x00000002, - VK_QUEUE_TRANSFER_BIT = 0x00000004, - VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008, - VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueueFlagBits; -typedef VkFlags VkQueueFlags; - -typedef enum VkMemoryPropertyFlagBits { - VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001, - 
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002, - VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004, - VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008, - VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010, - VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryPropertyFlagBits; -typedef VkFlags VkMemoryPropertyFlags; - -typedef enum VkMemoryHeapFlagBits { - VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001, - VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHX = 0x00000002, - VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkMemoryHeapFlagBits; -typedef VkFlags VkMemoryHeapFlags; -typedef VkFlags VkDeviceCreateFlags; -typedef VkFlags VkDeviceQueueCreateFlags; - -typedef enum VkPipelineStageFlagBits { - VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001, - VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002, - VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004, - VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008, - VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010, - VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020, - VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040, - VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080, - VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100, - VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200, - VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400, - VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800, - VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000, - VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000, - VK_PIPELINE_STAGE_HOST_BIT = 0x00004000, - VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000, - VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000, - VK_PIPELINE_STAGE_COMMAND_PROCESS_BIT_NVX = 0x00020000, - VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineStageFlagBits; -typedef VkFlags VkPipelineStageFlags; -typedef VkFlags VkMemoryMapFlags; - -typedef enum VkImageAspectFlagBits { - VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001, - VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002, - VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004, - VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008, - VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkImageAspectFlagBits; -typedef VkFlags VkImageAspectFlags; - -typedef enum VkSparseImageFormatFlagBits { - VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001, - VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002, - VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004, - VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseImageFormatFlagBits; -typedef VkFlags VkSparseImageFormatFlags; - -typedef enum VkSparseMemoryBindFlagBits { - VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001, - VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSparseMemoryBindFlagBits; -typedef VkFlags VkSparseMemoryBindFlags; - -typedef enum VkFenceCreateFlagBits { - VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001, - VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkFenceCreateFlagBits; -typedef VkFlags VkFenceCreateFlags; -typedef VkFlags VkSemaphoreCreateFlags; -typedef VkFlags VkEventCreateFlags; -typedef VkFlags VkQueryPoolCreateFlags; - -typedef enum VkQueryPipelineStatisticFlagBits { - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001, - VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002, - VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008, - VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 
0x00000020, - VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040, - VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100, - VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200, - VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400, - VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryPipelineStatisticFlagBits; -typedef VkFlags VkQueryPipelineStatisticFlags; - -typedef enum VkQueryResultFlagBits { - VK_QUERY_RESULT_64_BIT = 0x00000001, - VK_QUERY_RESULT_WAIT_BIT = 0x00000002, - VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004, - VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008, - VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryResultFlagBits; -typedef VkFlags VkQueryResultFlags; - -typedef enum VkBufferCreateFlagBits { - VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001, - VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002, - VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004, - VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferCreateFlagBits; -typedef VkFlags VkBufferCreateFlags; - -typedef enum VkBufferUsageFlagBits { - VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001, - VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002, - VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004, - VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008, - VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010, - VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020, - VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040, - VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080, - VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100, - VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkBufferUsageFlagBits; -typedef VkFlags VkBufferUsageFlags; -typedef VkFlags VkBufferViewCreateFlags; -typedef VkFlags VkImageViewCreateFlags; -typedef VkFlags VkShaderModuleCreateFlags; -typedef VkFlags VkPipelineCacheCreateFlags; - -typedef enum VkPipelineCreateFlagBits { - VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001, - VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002, - VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004, - VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHX = 0x00000008, - VK_PIPELINE_CREATE_DISPATCH_BASE_KHX = 0x00000010, - VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkPipelineCreateFlagBits; -typedef VkFlags VkPipelineCreateFlags; -typedef VkFlags VkPipelineShaderStageCreateFlags; - -typedef enum VkShaderStageFlagBits { - VK_SHADER_STAGE_VERTEX_BIT = 0x00000001, - VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002, - VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004, - VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008, - VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010, - VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020, - VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F, - VK_SHADER_STAGE_ALL = 0x7FFFFFFF, - VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkShaderStageFlagBits; -typedef VkFlags VkPipelineVertexInputStateCreateFlags; -typedef VkFlags VkPipelineInputAssemblyStateCreateFlags; -typedef VkFlags VkPipelineTessellationStateCreateFlags; -typedef VkFlags VkPipelineViewportStateCreateFlags; -typedef VkFlags VkPipelineRasterizationStateCreateFlags; - -typedef enum VkCullModeFlagBits { - VK_CULL_MODE_NONE = 0, - VK_CULL_MODE_FRONT_BIT = 0x00000001, - VK_CULL_MODE_BACK_BIT = 0x00000002, - VK_CULL_MODE_FRONT_AND_BACK = 0x00000003, - VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCullModeFlagBits; -typedef VkFlags 
VkCullModeFlags; -typedef VkFlags VkPipelineMultisampleStateCreateFlags; -typedef VkFlags VkPipelineDepthStencilStateCreateFlags; -typedef VkFlags VkPipelineColorBlendStateCreateFlags; - -typedef enum VkColorComponentFlagBits { - VK_COLOR_COMPONENT_R_BIT = 0x00000001, - VK_COLOR_COMPONENT_G_BIT = 0x00000002, - VK_COLOR_COMPONENT_B_BIT = 0x00000004, - VK_COLOR_COMPONENT_A_BIT = 0x00000008, - VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkColorComponentFlagBits; -typedef VkFlags VkColorComponentFlags; -typedef VkFlags VkPipelineDynamicStateCreateFlags; -typedef VkFlags VkPipelineLayoutCreateFlags; -typedef VkFlags VkShaderStageFlags; -typedef VkFlags VkSamplerCreateFlags; - -typedef enum VkDescriptorSetLayoutCreateFlagBits { - VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001, - VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorSetLayoutCreateFlagBits; -typedef VkFlags VkDescriptorSetLayoutCreateFlags; - -typedef enum VkDescriptorPoolCreateFlagBits { - VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001, - VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDescriptorPoolCreateFlagBits; -typedef VkFlags VkDescriptorPoolCreateFlags; -typedef VkFlags VkDescriptorPoolResetFlags; -typedef VkFlags VkFramebufferCreateFlags; -typedef VkFlags VkRenderPassCreateFlags; - -typedef enum VkAttachmentDescriptionFlagBits { - VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001, - VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAttachmentDescriptionFlagBits; -typedef VkFlags VkAttachmentDescriptionFlags; - -typedef enum VkSubpassDescriptionFlagBits { - VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001, - VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002, - VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkSubpassDescriptionFlagBits; -typedef VkFlags VkSubpassDescriptionFlags; - -typedef enum VkAccessFlagBits { - VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001, - VK_ACCESS_INDEX_READ_BIT = 0x00000002, - VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004, - VK_ACCESS_UNIFORM_READ_BIT = 0x00000008, - VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010, - VK_ACCESS_SHADER_READ_BIT = 0x00000020, - VK_ACCESS_SHADER_WRITE_BIT = 0x00000040, - VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080, - VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200, - VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400, - VK_ACCESS_TRANSFER_READ_BIT = 0x00000800, - VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000, - VK_ACCESS_HOST_READ_BIT = 0x00002000, - VK_ACCESS_HOST_WRITE_BIT = 0x00004000, - VK_ACCESS_MEMORY_READ_BIT = 0x00008000, - VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000, - VK_ACCESS_COMMAND_PROCESS_READ_BIT_NVX = 0x00020000, - VK_ACCESS_COMMAND_PROCESS_WRITE_BIT_NVX = 0x00040000, - VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000, - VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkAccessFlagBits; -typedef VkFlags VkAccessFlags; - -typedef enum VkDependencyFlagBits { - VK_DEPENDENCY_BY_REGION_BIT = 0x00000001, - VK_DEPENDENCY_VIEW_LOCAL_BIT_KHX = 0x00000002, - VK_DEPENDENCY_DEVICE_GROUP_BIT_KHX = 0x00000004, - VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkDependencyFlagBits; -typedef VkFlags VkDependencyFlags; - -typedef enum VkCommandPoolCreateFlagBits { - VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001, - VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002, - VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF 
-} VkCommandPoolCreateFlagBits; -typedef VkFlags VkCommandPoolCreateFlags; - -typedef enum VkCommandPoolResetFlagBits { - VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandPoolResetFlagBits; -typedef VkFlags VkCommandPoolResetFlags; - -typedef enum VkCommandBufferUsageFlagBits { - VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001, - VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002, - VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004, - VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferUsageFlagBits; -typedef VkFlags VkCommandBufferUsageFlags; - -typedef enum VkQueryControlFlagBits { - VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001, - VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkQueryControlFlagBits; -typedef VkFlags VkQueryControlFlags; - -typedef enum VkCommandBufferResetFlagBits { - VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001, - VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkCommandBufferResetFlagBits; -typedef VkFlags VkCommandBufferResetFlags; - -typedef enum VkStencilFaceFlagBits { - VK_STENCIL_FACE_FRONT_BIT = 0x00000001, - VK_STENCIL_FACE_BACK_BIT = 0x00000002, - VK_STENCIL_FRONT_AND_BACK = 0x00000003, - VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF -} VkStencilFaceFlagBits; -typedef VkFlags VkStencilFaceFlags; - -typedef struct VkApplicationInfo { - VkStructureType sType; - const void *pNext; - const char *pApplicationName; - uint32_t applicationVersion; - const char *pEngineName; - uint32_t engineVersion; - uint32_t apiVersion; -} VkApplicationInfo; - -typedef struct VkInstanceCreateInfo { - VkStructureType sType; - const void *pNext; - VkInstanceCreateFlags flags; - const VkApplicationInfo *pApplicationInfo; - uint32_t enabledLayerCount; - const char *const *ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char *const *ppEnabledExtensionNames; -} VkInstanceCreateInfo; - -typedef void *(VKAPI_PTR *PFN_vkAllocationFunction)( - void *pUserData, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void *(VKAPI_PTR *PFN_vkReallocationFunction)( - void *pUserData, - void *pOriginal, - size_t size, - size_t alignment, - VkSystemAllocationScope allocationScope); - -typedef void(VKAPI_PTR *PFN_vkFreeFunction)( - void *pUserData, - void *pMemory); - -typedef void(VKAPI_PTR *PFN_vkInternalAllocationNotification)( - void *pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope allocationScope); - -typedef void(VKAPI_PTR *PFN_vkInternalFreeNotification)( - void *pUserData, - size_t size, - VkInternalAllocationType allocationType, - VkSystemAllocationScope allocationScope); - -typedef struct VkAllocationCallbacks { - void *pUserData; - PFN_vkAllocationFunction pfnAllocation; - PFN_vkReallocationFunction pfnReallocation; - PFN_vkFreeFunction pfnFree; - PFN_vkInternalAllocationNotification pfnInternalAllocation; - PFN_vkInternalFreeNotification pfnInternalFree; -} VkAllocationCallbacks; - -typedef struct VkPhysicalDeviceFeatures { - VkBool32 robustBufferAccess; - VkBool32 fullDrawIndexUint32; - VkBool32 imageCubeArray; - VkBool32 independentBlend; - VkBool32 geometryShader; - VkBool32 tessellationShader; - VkBool32 sampleRateShading; - VkBool32 dualSrcBlend; - VkBool32 logicOp; - VkBool32 multiDrawIndirect; - VkBool32 drawIndirectFirstInstance; - VkBool32 depthClamp; - VkBool32 depthBiasClamp; - VkBool32 fillModeNonSolid; - VkBool32 depthBounds; - 
VkBool32 wideLines; - VkBool32 largePoints; - VkBool32 alphaToOne; - VkBool32 multiViewport; - VkBool32 samplerAnisotropy; - VkBool32 textureCompressionETC2; - VkBool32 textureCompressionASTC_LDR; - VkBool32 textureCompressionBC; - VkBool32 occlusionQueryPrecise; - VkBool32 pipelineStatisticsQuery; - VkBool32 vertexPipelineStoresAndAtomics; - VkBool32 fragmentStoresAndAtomics; - VkBool32 shaderTessellationAndGeometryPointSize; - VkBool32 shaderImageGatherExtended; - VkBool32 shaderStorageImageExtendedFormats; - VkBool32 shaderStorageImageMultisample; - VkBool32 shaderStorageImageReadWithoutFormat; - VkBool32 shaderStorageImageWriteWithoutFormat; - VkBool32 shaderUniformBufferArrayDynamicIndexing; - VkBool32 shaderSampledImageArrayDynamicIndexing; - VkBool32 shaderStorageBufferArrayDynamicIndexing; - VkBool32 shaderStorageImageArrayDynamicIndexing; - VkBool32 shaderClipDistance; - VkBool32 shaderCullDistance; - VkBool32 shaderFloat64; - VkBool32 shaderInt64; - VkBool32 shaderInt16; - VkBool32 shaderResourceResidency; - VkBool32 shaderResourceMinLod; - VkBool32 sparseBinding; - VkBool32 sparseResidencyBuffer; - VkBool32 sparseResidencyImage2D; - VkBool32 sparseResidencyImage3D; - VkBool32 sparseResidency2Samples; - VkBool32 sparseResidency4Samples; - VkBool32 sparseResidency8Samples; - VkBool32 sparseResidency16Samples; - VkBool32 sparseResidencyAliased; - VkBool32 variableMultisampleRate; - VkBool32 inheritedQueries; -} VkPhysicalDeviceFeatures; - -typedef struct VkFormatProperties { - VkFormatFeatureFlags linearTilingFeatures; - VkFormatFeatureFlags optimalTilingFeatures; - VkFormatFeatureFlags bufferFeatures; -} VkFormatProperties; - -typedef struct VkExtent3D { - uint32_t width; - uint32_t height; - uint32_t depth; -} VkExtent3D; - -typedef struct VkImageFormatProperties { - VkExtent3D maxExtent; - uint32_t maxMipLevels; - uint32_t maxArrayLayers; - VkSampleCountFlags sampleCounts; - VkDeviceSize maxResourceSize; -} VkImageFormatProperties; - -typedef struct VkPhysicalDeviceLimits { - uint32_t maxImageDimension1D; - uint32_t maxImageDimension2D; - uint32_t maxImageDimension3D; - uint32_t maxImageDimensionCube; - uint32_t maxImageArrayLayers; - uint32_t maxTexelBufferElements; - uint32_t maxUniformBufferRange; - uint32_t maxStorageBufferRange; - uint32_t maxPushConstantsSize; - uint32_t maxMemoryAllocationCount; - uint32_t maxSamplerAllocationCount; - VkDeviceSize bufferImageGranularity; - VkDeviceSize sparseAddressSpaceSize; - uint32_t maxBoundDescriptorSets; - uint32_t maxPerStageDescriptorSamplers; - uint32_t maxPerStageDescriptorUniformBuffers; - uint32_t maxPerStageDescriptorStorageBuffers; - uint32_t maxPerStageDescriptorSampledImages; - uint32_t maxPerStageDescriptorStorageImages; - uint32_t maxPerStageDescriptorInputAttachments; - uint32_t maxPerStageResources; - uint32_t maxDescriptorSetSamplers; - uint32_t maxDescriptorSetUniformBuffers; - uint32_t maxDescriptorSetUniformBuffersDynamic; - uint32_t maxDescriptorSetStorageBuffers; - uint32_t maxDescriptorSetStorageBuffersDynamic; - uint32_t maxDescriptorSetSampledImages; - uint32_t maxDescriptorSetStorageImages; - uint32_t maxDescriptorSetInputAttachments; - uint32_t maxVertexInputAttributes; - uint32_t maxVertexInputBindings; - uint32_t maxVertexInputAttributeOffset; - uint32_t maxVertexInputBindingStride; - uint32_t maxVertexOutputComponents; - uint32_t maxTessellationGenerationLevel; - uint32_t maxTessellationPatchSize; - uint32_t maxTessellationControlPerVertexInputComponents; - uint32_t 
maxTessellationControlPerVertexOutputComponents; - uint32_t maxTessellationControlPerPatchOutputComponents; - uint32_t maxTessellationControlTotalOutputComponents; - uint32_t maxTessellationEvaluationInputComponents; - uint32_t maxTessellationEvaluationOutputComponents; - uint32_t maxGeometryShaderInvocations; - uint32_t maxGeometryInputComponents; - uint32_t maxGeometryOutputComponents; - uint32_t maxGeometryOutputVertices; - uint32_t maxGeometryTotalOutputComponents; - uint32_t maxFragmentInputComponents; - uint32_t maxFragmentOutputAttachments; - uint32_t maxFragmentDualSrcAttachments; - uint32_t maxFragmentCombinedOutputResources; - uint32_t maxComputeSharedMemorySize; - uint32_t maxComputeWorkGroupCount[3]; - uint32_t maxComputeWorkGroupInvocations; - uint32_t maxComputeWorkGroupSize[3]; - uint32_t subPixelPrecisionBits; - uint32_t subTexelPrecisionBits; - uint32_t mipmapPrecisionBits; - uint32_t maxDrawIndexedIndexValue; - uint32_t maxDrawIndirectCount; - float maxSamplerLodBias; - float maxSamplerAnisotropy; - uint32_t maxViewports; - uint32_t maxViewportDimensions[2]; - float viewportBoundsRange[2]; - uint32_t viewportSubPixelBits; - size_t minMemoryMapAlignment; - VkDeviceSize minTexelBufferOffsetAlignment; - VkDeviceSize minUniformBufferOffsetAlignment; - VkDeviceSize minStorageBufferOffsetAlignment; - int32_t minTexelOffset; - uint32_t maxTexelOffset; - int32_t minTexelGatherOffset; - uint32_t maxTexelGatherOffset; - float minInterpolationOffset; - float maxInterpolationOffset; - uint32_t subPixelInterpolationOffsetBits; - uint32_t maxFramebufferWidth; - uint32_t maxFramebufferHeight; - uint32_t maxFramebufferLayers; - VkSampleCountFlags framebufferColorSampleCounts; - VkSampleCountFlags framebufferDepthSampleCounts; - VkSampleCountFlags framebufferStencilSampleCounts; - VkSampleCountFlags framebufferNoAttachmentsSampleCounts; - uint32_t maxColorAttachments; - VkSampleCountFlags sampledImageColorSampleCounts; - VkSampleCountFlags sampledImageIntegerSampleCounts; - VkSampleCountFlags sampledImageDepthSampleCounts; - VkSampleCountFlags sampledImageStencilSampleCounts; - VkSampleCountFlags storageImageSampleCounts; - uint32_t maxSampleMaskWords; - VkBool32 timestampComputeAndGraphics; - float timestampPeriod; - uint32_t maxClipDistances; - uint32_t maxCullDistances; - uint32_t maxCombinedClipAndCullDistances; - uint32_t discreteQueuePriorities; - float pointSizeRange[2]; - float lineWidthRange[2]; - float pointSizeGranularity; - float lineWidthGranularity; - VkBool32 strictLines; - VkBool32 standardSampleLocations; - VkDeviceSize optimalBufferCopyOffsetAlignment; - VkDeviceSize optimalBufferCopyRowPitchAlignment; - VkDeviceSize nonCoherentAtomSize; -} VkPhysicalDeviceLimits; - -typedef struct VkPhysicalDeviceSparseProperties { - VkBool32 residencyStandard2DBlockShape; - VkBool32 residencyStandard2DMultisampleBlockShape; - VkBool32 residencyStandard3DBlockShape; - VkBool32 residencyAlignedMipSize; - VkBool32 residencyNonResidentStrict; -} VkPhysicalDeviceSparseProperties; - -typedef struct VkPhysicalDeviceProperties { - uint32_t apiVersion; - uint32_t driverVersion; - uint32_t vendorID; - uint32_t deviceID; - VkPhysicalDeviceType deviceType; - char deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE]; - uint8_t pipelineCacheUUID[VK_UUID_SIZE]; - VkPhysicalDeviceLimits limits; - VkPhysicalDeviceSparseProperties sparseProperties; -} VkPhysicalDeviceProperties; - -typedef struct VkQueueFamilyProperties { - VkQueueFlags queueFlags; - uint32_t queueCount; - uint32_t timestampValidBits; - 
VkExtent3D minImageTransferGranularity; -} VkQueueFamilyProperties; - -typedef struct VkMemoryType { - VkMemoryPropertyFlags propertyFlags; - uint32_t heapIndex; -} VkMemoryType; - -typedef struct VkMemoryHeap { - VkDeviceSize size; - VkMemoryHeapFlags flags; -} VkMemoryHeap; - -typedef struct VkPhysicalDeviceMemoryProperties { - uint32_t memoryTypeCount; - VkMemoryType memoryTypes[VK_MAX_MEMORY_TYPES]; - uint32_t memoryHeapCount; - VkMemoryHeap memoryHeaps[VK_MAX_MEMORY_HEAPS]; -} VkPhysicalDeviceMemoryProperties; - -typedef void(VKAPI_PTR *PFN_vkVoidFunction)(); -typedef struct VkDeviceQueueCreateInfo { - VkStructureType sType; - const void *pNext; - VkDeviceQueueCreateFlags flags; - uint32_t queueFamilyIndex; - uint32_t queueCount; - const float *pQueuePriorities; -} VkDeviceQueueCreateInfo; - -typedef struct VkDeviceCreateInfo { - VkStructureType sType; - const void *pNext; - VkDeviceCreateFlags flags; - uint32_t queueCreateInfoCount; - const VkDeviceQueueCreateInfo *pQueueCreateInfos; - uint32_t enabledLayerCount; - const char *const *ppEnabledLayerNames; - uint32_t enabledExtensionCount; - const char *const *ppEnabledExtensionNames; - const VkPhysicalDeviceFeatures *pEnabledFeatures; -} VkDeviceCreateInfo; - -typedef struct VkExtensionProperties { - char extensionName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; -} VkExtensionProperties; - -typedef struct VkLayerProperties { - char layerName[VK_MAX_EXTENSION_NAME_SIZE]; - uint32_t specVersion; - uint32_t implementationVersion; - char description[VK_MAX_DESCRIPTION_SIZE]; -} VkLayerProperties; - -typedef struct VkSubmitInfo { - VkStructureType sType; - const void *pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore *pWaitSemaphores; - const VkPipelineStageFlags *pWaitDstStageMask; - uint32_t commandBufferCount; - const VkCommandBuffer *pCommandBuffers; - uint32_t signalSemaphoreCount; - const VkSemaphore *pSignalSemaphores; -} VkSubmitInfo; - -typedef struct VkMemoryAllocateInfo { - VkStructureType sType; - const void *pNext; - VkDeviceSize allocationSize; - uint32_t memoryTypeIndex; -} VkMemoryAllocateInfo; - -typedef struct VkMappedMemoryRange { - VkStructureType sType; - const void *pNext; - VkDeviceMemory memory; - VkDeviceSize offset; - VkDeviceSize size; -} VkMappedMemoryRange; - -typedef struct VkMemoryRequirements { - VkDeviceSize size; - VkDeviceSize alignment; - uint32_t memoryTypeBits; -} VkMemoryRequirements; - -typedef struct VkSparseImageFormatProperties { - VkImageAspectFlags aspectMask; - VkExtent3D imageGranularity; - VkSparseImageFormatFlags flags; -} VkSparseImageFormatProperties; - -typedef struct VkSparseImageMemoryRequirements { - VkSparseImageFormatProperties formatProperties; - uint32_t imageMipTailFirstLod; - VkDeviceSize imageMipTailSize; - VkDeviceSize imageMipTailOffset; - VkDeviceSize imageMipTailStride; -} VkSparseImageMemoryRequirements; - -typedef struct VkSparseMemoryBind { - VkDeviceSize resourceOffset; - VkDeviceSize size; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseMemoryBind; - -typedef struct VkSparseBufferMemoryBindInfo { - VkBuffer buffer; - uint32_t bindCount; - const VkSparseMemoryBind *pBinds; -} VkSparseBufferMemoryBindInfo; - -typedef struct VkSparseImageOpaqueMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseMemoryBind *pBinds; -} VkSparseImageOpaqueMemoryBindInfo; - -typedef struct VkImageSubresource { - VkImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t arrayLayer; -} VkImageSubresource; 
- -typedef struct VkOffset3D { - int32_t x; - int32_t y; - int32_t z; -} VkOffset3D; - -typedef struct VkSparseImageMemoryBind { - VkImageSubresource subresource; - VkOffset3D offset; - VkExtent3D extent; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - VkSparseMemoryBindFlags flags; -} VkSparseImageMemoryBind; - -typedef struct VkSparseImageMemoryBindInfo { - VkImage image; - uint32_t bindCount; - const VkSparseImageMemoryBind *pBinds; -} VkSparseImageMemoryBindInfo; - -typedef struct VkBindSparseInfo { - VkStructureType sType; - const void *pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore *pWaitSemaphores; - uint32_t bufferBindCount; - const VkSparseBufferMemoryBindInfo *pBufferBinds; - uint32_t imageOpaqueBindCount; - const VkSparseImageOpaqueMemoryBindInfo *pImageOpaqueBinds; - uint32_t imageBindCount; - const VkSparseImageMemoryBindInfo *pImageBinds; - uint32_t signalSemaphoreCount; - const VkSemaphore *pSignalSemaphores; -} VkBindSparseInfo; - -typedef struct VkFenceCreateInfo { - VkStructureType sType; - const void *pNext; - VkFenceCreateFlags flags; -} VkFenceCreateInfo; - -typedef struct VkSemaphoreCreateInfo { - VkStructureType sType; - const void *pNext; - VkSemaphoreCreateFlags flags; -} VkSemaphoreCreateInfo; - -typedef struct VkEventCreateInfo { - VkStructureType sType; - const void *pNext; - VkEventCreateFlags flags; -} VkEventCreateInfo; - -typedef struct VkQueryPoolCreateInfo { - VkStructureType sType; - const void *pNext; - VkQueryPoolCreateFlags flags; - VkQueryType queryType; - uint32_t queryCount; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkQueryPoolCreateInfo; - -typedef struct VkBufferCreateInfo { - VkStructureType sType; - const void *pNext; - VkBufferCreateFlags flags; - VkDeviceSize size; - VkBufferUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t *pQueueFamilyIndices; -} VkBufferCreateInfo; - -typedef struct VkBufferViewCreateInfo { - VkStructureType sType; - const void *pNext; - VkBufferViewCreateFlags flags; - VkBuffer buffer; - VkFormat format; - VkDeviceSize offset; - VkDeviceSize range; -} VkBufferViewCreateInfo; - -typedef struct VkImageCreateInfo { - VkStructureType sType; - const void *pNext; - VkImageCreateFlags flags; - VkImageType imageType; - VkFormat format; - VkExtent3D extent; - uint32_t mipLevels; - uint32_t arrayLayers; - VkSampleCountFlagBits samples; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkSharingMode sharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t *pQueueFamilyIndices; - VkImageLayout initialLayout; -} VkImageCreateInfo; - -typedef struct VkSubresourceLayout { - VkDeviceSize offset; - VkDeviceSize size; - VkDeviceSize rowPitch; - VkDeviceSize arrayPitch; - VkDeviceSize depthPitch; -} VkSubresourceLayout; - -typedef struct VkComponentMapping { - VkComponentSwizzle r; - VkComponentSwizzle g; - VkComponentSwizzle b; - VkComponentSwizzle a; -} VkComponentMapping; - -typedef struct VkImageSubresourceRange { - VkImageAspectFlags aspectMask; - uint32_t baseMipLevel; - uint32_t levelCount; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceRange; - -typedef struct VkImageViewCreateInfo { - VkStructureType sType; - const void *pNext; - VkImageViewCreateFlags flags; - VkImage image; - VkImageViewType viewType; - VkFormat format; - VkComponentMapping components; - VkImageSubresourceRange subresourceRange; -} VkImageViewCreateInfo; - -typedef struct VkShaderModuleCreateInfo { - VkStructureType sType; - const void *pNext; - 
VkShaderModuleCreateFlags flags; - size_t codeSize; - const uint32_t *pCode; -} VkShaderModuleCreateInfo; - -typedef struct VkPipelineCacheCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineCacheCreateFlags flags; - size_t initialDataSize; - const void *pInitialData; -} VkPipelineCacheCreateInfo; - -typedef struct VkSpecializationMapEntry { - uint32_t constantID; - uint32_t offset; - size_t size; -} VkSpecializationMapEntry; - -typedef struct VkSpecializationInfo { - uint32_t mapEntryCount; - const VkSpecializationMapEntry *pMapEntries; - size_t dataSize; - const void *pData; -} VkSpecializationInfo; - -typedef struct VkPipelineShaderStageCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineShaderStageCreateFlags flags; - VkShaderStageFlagBits stage; - VkShaderModule module; - const char *pName; - const VkSpecializationInfo *pSpecializationInfo; -} VkPipelineShaderStageCreateInfo; - -typedef struct VkVertexInputBindingDescription { - uint32_t binding; - uint32_t stride; - VkVertexInputRate inputRate; -} VkVertexInputBindingDescription; - -typedef struct VkVertexInputAttributeDescription { - uint32_t location; - uint32_t binding; - VkFormat format; - uint32_t offset; -} VkVertexInputAttributeDescription; - -typedef struct VkPipelineVertexInputStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineVertexInputStateCreateFlags flags; - uint32_t vertexBindingDescriptionCount; - const VkVertexInputBindingDescription *pVertexBindingDescriptions; - uint32_t vertexAttributeDescriptionCount; - const VkVertexInputAttributeDescription *pVertexAttributeDescriptions; -} VkPipelineVertexInputStateCreateInfo; - -typedef struct VkPipelineInputAssemblyStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineInputAssemblyStateCreateFlags flags; - VkPrimitiveTopology topology; - VkBool32 primitiveRestartEnable; -} VkPipelineInputAssemblyStateCreateInfo; - -typedef struct VkPipelineTessellationStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineTessellationStateCreateFlags flags; - uint32_t patchControlPoints; -} VkPipelineTessellationStateCreateInfo; - -typedef struct VkViewport { - float x; - float y; - float width; - float height; - float minDepth; - float maxDepth; -} VkViewport; - -typedef struct VkOffset2D { - int32_t x; - int32_t y; -} VkOffset2D; - -typedef struct VkExtent2D { - uint32_t width; - uint32_t height; -} VkExtent2D; - -typedef struct VkRect2D { - VkOffset2D offset; - VkExtent2D extent; -} VkRect2D; - -typedef struct VkPipelineViewportStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineViewportStateCreateFlags flags; - uint32_t viewportCount; - const VkViewport *pViewports; - uint32_t scissorCount; - const VkRect2D *pScissors; -} VkPipelineViewportStateCreateInfo; - -typedef struct VkPipelineRasterizationStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineRasterizationStateCreateFlags flags; - VkBool32 depthClampEnable; - VkBool32 rasterizerDiscardEnable; - VkPolygonMode polygonMode; - VkCullModeFlags cullMode; - VkFrontFace frontFace; - VkBool32 depthBiasEnable; - float depthBiasConstantFactor; - float depthBiasClamp; - float depthBiasSlopeFactor; - float lineWidth; -} VkPipelineRasterizationStateCreateInfo; - -typedef struct VkPipelineMultisampleStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineMultisampleStateCreateFlags flags; - VkSampleCountFlagBits rasterizationSamples; - VkBool32 sampleShadingEnable; - float 
minSampleShading; - const VkSampleMask *pSampleMask; - VkBool32 alphaToCoverageEnable; - VkBool32 alphaToOneEnable; -} VkPipelineMultisampleStateCreateInfo; - -typedef struct VkStencilOpState { - VkStencilOp failOp; - VkStencilOp passOp; - VkStencilOp depthFailOp; - VkCompareOp compareOp; - uint32_t compareMask; - uint32_t writeMask; - uint32_t reference; -} VkStencilOpState; - -typedef struct VkPipelineDepthStencilStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineDepthStencilStateCreateFlags flags; - VkBool32 depthTestEnable; - VkBool32 depthWriteEnable; - VkCompareOp depthCompareOp; - VkBool32 depthBoundsTestEnable; - VkBool32 stencilTestEnable; - VkStencilOpState front; - VkStencilOpState back; - float minDepthBounds; - float maxDepthBounds; -} VkPipelineDepthStencilStateCreateInfo; - -typedef struct VkPipelineColorBlendAttachmentState { - VkBool32 blendEnable; - VkBlendFactor srcColorBlendFactor; - VkBlendFactor dstColorBlendFactor; - VkBlendOp colorBlendOp; - VkBlendFactor srcAlphaBlendFactor; - VkBlendFactor dstAlphaBlendFactor; - VkBlendOp alphaBlendOp; - VkColorComponentFlags colorWriteMask; -} VkPipelineColorBlendAttachmentState; - -typedef struct VkPipelineColorBlendStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineColorBlendStateCreateFlags flags; - VkBool32 logicOpEnable; - VkLogicOp logicOp; - uint32_t attachmentCount; - const VkPipelineColorBlendAttachmentState *pAttachments; - float blendConstants[4]; -} VkPipelineColorBlendStateCreateInfo; - -typedef struct VkPipelineDynamicStateCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineDynamicStateCreateFlags flags; - uint32_t dynamicStateCount; - const VkDynamicState *pDynamicStates; -} VkPipelineDynamicStateCreateInfo; - -typedef struct VkGraphicsPipelineCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineCreateFlags flags; - uint32_t stageCount; - const VkPipelineShaderStageCreateInfo *pStages; - const VkPipelineVertexInputStateCreateInfo *pVertexInputState; - const VkPipelineInputAssemblyStateCreateInfo *pInputAssemblyState; - const VkPipelineTessellationStateCreateInfo *pTessellationState; - const VkPipelineViewportStateCreateInfo *pViewportState; - const VkPipelineRasterizationStateCreateInfo *pRasterizationState; - const VkPipelineMultisampleStateCreateInfo *pMultisampleState; - const VkPipelineDepthStencilStateCreateInfo *pDepthStencilState; - const VkPipelineColorBlendStateCreateInfo *pColorBlendState; - const VkPipelineDynamicStateCreateInfo *pDynamicState; - VkPipelineLayout layout; - VkRenderPass renderPass; - uint32_t subpass; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkGraphicsPipelineCreateInfo; - -typedef struct VkComputePipelineCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineCreateFlags flags; - VkPipelineShaderStageCreateInfo stage; - VkPipelineLayout layout; - VkPipeline basePipelineHandle; - int32_t basePipelineIndex; -} VkComputePipelineCreateInfo; - -typedef struct VkPushConstantRange { - VkShaderStageFlags stageFlags; - uint32_t offset; - uint32_t size; -} VkPushConstantRange; - -typedef struct VkPipelineLayoutCreateInfo { - VkStructureType sType; - const void *pNext; - VkPipelineLayoutCreateFlags flags; - uint32_t setLayoutCount; - const VkDescriptorSetLayout *pSetLayouts; - uint32_t pushConstantRangeCount; - const VkPushConstantRange *pPushConstantRanges; -} VkPipelineLayoutCreateInfo; - -typedef struct VkSamplerCreateInfo { - VkStructureType sType; - const void *pNext; - 
VkSamplerCreateFlags flags; - VkFilter magFilter; - VkFilter minFilter; - VkSamplerMipmapMode mipmapMode; - VkSamplerAddressMode addressModeU; - VkSamplerAddressMode addressModeV; - VkSamplerAddressMode addressModeW; - float mipLodBias; - VkBool32 anisotropyEnable; - float maxAnisotropy; - VkBool32 compareEnable; - VkCompareOp compareOp; - float minLod; - float maxLod; - VkBorderColor borderColor; - VkBool32 unnormalizedCoordinates; -} VkSamplerCreateInfo; - -typedef struct VkDescriptorSetLayoutBinding { - uint32_t binding; - VkDescriptorType descriptorType; - uint32_t descriptorCount; - VkShaderStageFlags stageFlags; - const VkSampler *pImmutableSamplers; -} VkDescriptorSetLayoutBinding; - -typedef struct VkDescriptorSetLayoutCreateInfo { - VkStructureType sType; - const void *pNext; - VkDescriptorSetLayoutCreateFlags flags; - uint32_t bindingCount; - const VkDescriptorSetLayoutBinding *pBindings; -} VkDescriptorSetLayoutCreateInfo; - -typedef struct VkDescriptorPoolSize { - VkDescriptorType type; - uint32_t descriptorCount; -} VkDescriptorPoolSize; - -typedef struct VkDescriptorPoolCreateInfo { - VkStructureType sType; - const void *pNext; - VkDescriptorPoolCreateFlags flags; - uint32_t maxSets; - uint32_t poolSizeCount; - const VkDescriptorPoolSize *pPoolSizes; -} VkDescriptorPoolCreateInfo; - -typedef struct VkDescriptorSetAllocateInfo { - VkStructureType sType; - const void *pNext; - VkDescriptorPool descriptorPool; - uint32_t descriptorSetCount; - const VkDescriptorSetLayout *pSetLayouts; -} VkDescriptorSetAllocateInfo; - -typedef struct VkDescriptorImageInfo { - VkSampler sampler; - VkImageView imageView; - VkImageLayout imageLayout; -} VkDescriptorImageInfo; - -typedef struct VkDescriptorBufferInfo { - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize range; -} VkDescriptorBufferInfo; - -typedef struct VkWriteDescriptorSet { - VkStructureType sType; - const void *pNext; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - const VkDescriptorImageInfo *pImageInfo; - const VkDescriptorBufferInfo *pBufferInfo; - const VkBufferView *pTexelBufferView; -} VkWriteDescriptorSet; - -typedef struct VkCopyDescriptorSet { - VkStructureType sType; - const void *pNext; - VkDescriptorSet srcSet; - uint32_t srcBinding; - uint32_t srcArrayElement; - VkDescriptorSet dstSet; - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; -} VkCopyDescriptorSet; - -typedef struct VkFramebufferCreateInfo { - VkStructureType sType; - const void *pNext; - VkFramebufferCreateFlags flags; - VkRenderPass renderPass; - uint32_t attachmentCount; - const VkImageView *pAttachments; - uint32_t width; - uint32_t height; - uint32_t layers; -} VkFramebufferCreateInfo; - -typedef struct VkAttachmentDescription { - VkAttachmentDescriptionFlags flags; - VkFormat format; - VkSampleCountFlagBits samples; - VkAttachmentLoadOp loadOp; - VkAttachmentStoreOp storeOp; - VkAttachmentLoadOp stencilLoadOp; - VkAttachmentStoreOp stencilStoreOp; - VkImageLayout initialLayout; - VkImageLayout finalLayout; -} VkAttachmentDescription; - -typedef struct VkAttachmentReference { - uint32_t attachment; - VkImageLayout layout; -} VkAttachmentReference; - -typedef struct VkSubpassDescription { - VkSubpassDescriptionFlags flags; - VkPipelineBindPoint pipelineBindPoint; - uint32_t inputAttachmentCount; - const VkAttachmentReference *pInputAttachments; - uint32_t colorAttachmentCount; - const VkAttachmentReference *pColorAttachments; - 
const VkAttachmentReference *pResolveAttachments; - const VkAttachmentReference *pDepthStencilAttachment; - uint32_t preserveAttachmentCount; - const uint32_t *pPreserveAttachments; -} VkSubpassDescription; - -typedef struct VkSubpassDependency { - uint32_t srcSubpass; - uint32_t dstSubpass; - VkPipelineStageFlags srcStageMask; - VkPipelineStageFlags dstStageMask; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkDependencyFlags dependencyFlags; -} VkSubpassDependency; - -typedef struct VkRenderPassCreateInfo { - VkStructureType sType; - const void *pNext; - VkRenderPassCreateFlags flags; - uint32_t attachmentCount; - const VkAttachmentDescription *pAttachments; - uint32_t subpassCount; - const VkSubpassDescription *pSubpasses; - uint32_t dependencyCount; - const VkSubpassDependency *pDependencies; -} VkRenderPassCreateInfo; - -typedef struct VkCommandPoolCreateInfo { - VkStructureType sType; - const void *pNext; - VkCommandPoolCreateFlags flags; - uint32_t queueFamilyIndex; -} VkCommandPoolCreateInfo; - -typedef struct VkCommandBufferAllocateInfo { - VkStructureType sType; - const void *pNext; - VkCommandPool commandPool; - VkCommandBufferLevel level; - uint32_t commandBufferCount; -} VkCommandBufferAllocateInfo; - -typedef struct VkCommandBufferInheritanceInfo { - VkStructureType sType; - const void *pNext; - VkRenderPass renderPass; - uint32_t subpass; - VkFramebuffer framebuffer; - VkBool32 occlusionQueryEnable; - VkQueryControlFlags queryFlags; - VkQueryPipelineStatisticFlags pipelineStatistics; -} VkCommandBufferInheritanceInfo; - -typedef struct VkCommandBufferBeginInfo { - VkStructureType sType; - const void *pNext; - VkCommandBufferUsageFlags flags; - const VkCommandBufferInheritanceInfo *pInheritanceInfo; -} VkCommandBufferBeginInfo; - -typedef struct VkBufferCopy { - VkDeviceSize srcOffset; - VkDeviceSize dstOffset; - VkDeviceSize size; -} VkBufferCopy; - -typedef struct VkImageSubresourceLayers { - VkImageAspectFlags aspectMask; - uint32_t mipLevel; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkImageSubresourceLayers; - -typedef struct VkImageCopy { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageCopy; - -typedef struct VkImageBlit { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffsets[2]; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffsets[2]; -} VkImageBlit; - -typedef struct VkBufferImageCopy { - VkDeviceSize bufferOffset; - uint32_t bufferRowLength; - uint32_t bufferImageHeight; - VkImageSubresourceLayers imageSubresource; - VkOffset3D imageOffset; - VkExtent3D imageExtent; -} VkBufferImageCopy; - -typedef union VkClearColorValue { - float float32[4]; - int32_t int32[4]; - uint32_t uint32[4]; -} VkClearColorValue; - -typedef struct VkClearDepthStencilValue { - float depth; - uint32_t stencil; -} VkClearDepthStencilValue; - -typedef union VkClearValue { - VkClearColorValue color; - VkClearDepthStencilValue depthStencil; -} VkClearValue; - -typedef struct VkClearAttachment { - VkImageAspectFlags aspectMask; - uint32_t colorAttachment; - VkClearValue clearValue; -} VkClearAttachment; - -typedef struct VkClearRect { - VkRect2D rect; - uint32_t baseArrayLayer; - uint32_t layerCount; -} VkClearRect; - -typedef struct VkImageResolve { - VkImageSubresourceLayers srcSubresource; - VkOffset3D srcOffset; - VkImageSubresourceLayers dstSubresource; - VkOffset3D dstOffset; - VkExtent3D extent; -} VkImageResolve; - 
-typedef struct VkMemoryBarrier { - VkStructureType sType; - const void *pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; -} VkMemoryBarrier; - -typedef struct VkBufferMemoryBarrier { - VkStructureType sType; - const void *pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkBuffer buffer; - VkDeviceSize offset; - VkDeviceSize size; -} VkBufferMemoryBarrier; - -typedef struct VkImageMemoryBarrier { - VkStructureType sType; - const void *pNext; - VkAccessFlags srcAccessMask; - VkAccessFlags dstAccessMask; - VkImageLayout oldLayout; - VkImageLayout newLayout; - uint32_t srcQueueFamilyIndex; - uint32_t dstQueueFamilyIndex; - VkImage image; - VkImageSubresourceRange subresourceRange; -} VkImageMemoryBarrier; - -typedef struct VkRenderPassBeginInfo { - VkStructureType sType; - const void *pNext; - VkRenderPass renderPass; - VkFramebuffer framebuffer; - VkRect2D renderArea; - uint32_t clearValueCount; - const VkClearValue *pClearValues; -} VkRenderPassBeginInfo; - -typedef struct VkDispatchIndirectCommand { - uint32_t x; - uint32_t y; - uint32_t z; -} VkDispatchIndirectCommand; - -typedef struct VkDrawIndexedIndirectCommand { - uint32_t indexCount; - uint32_t instanceCount; - uint32_t firstIndex; - int32_t vertexOffset; - uint32_t firstInstance; -} VkDrawIndexedIndirectCommand; - -typedef struct VkDrawIndirectCommand { - uint32_t vertexCount; - uint32_t instanceCount; - uint32_t firstVertex; - uint32_t firstInstance; -} VkDrawIndirectCommand; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance); -typedef void(VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties *pImageFormatProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t *pQueueFamilyPropertyCount, VkQueueFamilyProperties *pQueueFamilyProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties); -typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char *pName); -typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char *pName); -typedef VkResult(VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice); -typedef void(VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR 
*PFN_vkEnumerateInstanceExtensionProperties)(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t *pPropertyCount, VkLayerProperties *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties); -typedef void(VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue); -typedef VkResult(VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence); -typedef VkResult(VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue); -typedef VkResult(VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device); -typedef VkResult(VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo *pAllocateInfo, const VkAllocationCallbacks *pAllocator, VkDeviceMemory *pMemory); -typedef void(VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData); -typedef void(VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory); -typedef VkResult(VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges); -typedef VkResult(VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges); -typedef void(VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize *pCommittedMemoryInBytes); -typedef VkResult(VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef VkResult(VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset); -typedef void(VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements); -typedef void(VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements); -typedef void(VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements *pSparseMemoryRequirements); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t *pPropertyCount, VkSparseImageFormatProperties *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence); -typedef VkResult(VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence); -typedef void(VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const 
VkFence *pFences); -typedef VkResult(VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence); -typedef VkResult(VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout); -typedef VkResult(VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSemaphore *pSemaphore); -typedef void(VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkEvent *pEvent); -typedef void(VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event); -typedef VkResult(VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event); -typedef VkResult(VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event); -typedef VkResult(VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkQueryPool *pQueryPool); -typedef void(VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void *pData, VkDeviceSize stride, VkQueryResultFlags flags); -typedef VkResult(VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBuffer *pBuffer); -typedef void(VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkBufferView *pView); -typedef void(VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImage *pImage); -typedef void(VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator); -typedef void(VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource *pSubresource, VkSubresourceLayout *pLayout); -typedef VkResult(VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkImageView *pView); -typedef void(VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkShaderModule *pShaderModule); -typedef void(VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineCache *pPipelineCache); -typedef void(VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, 
VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData); -typedef VkResult(VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches); -typedef VkResult(VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines); -typedef VkResult(VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkPipeline *pPipelines); -typedef void(VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkPipelineLayout *pPipelineLayout); -typedef void(VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSampler *pSampler); -typedef void(VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorSetLayout *pSetLayout); -typedef void(VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorPool *pDescriptorPool); -typedef void(VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags); -typedef VkResult(VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo *pAllocateInfo, VkDescriptorSet *pDescriptorSets); -typedef VkResult(VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets); -typedef void(VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet *pDescriptorCopies); -typedef VkResult(VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkFramebuffer *pFramebuffer); -typedef void(VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass); 
-typedef void(VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator); -typedef void(VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity); -typedef VkResult(VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkCommandPool *pCommandPool); -typedef void(VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags); -typedef VkResult(VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo *pAllocateInfo, VkCommandBuffer *pCommandBuffers); -typedef void(VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers); -typedef VkResult(VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo); -typedef VkResult(VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer); -typedef VkResult(VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags); -typedef void(VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline); -typedef void(VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports); -typedef void(VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors); -typedef void(VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth); -typedef void(VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor); -typedef void(VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]); -typedef void(VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds); -typedef void(VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask); -typedef void(VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask); -typedef void(VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference); -typedef void(VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet *pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t *pDynamicOffsets); -typedef void(VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType); -typedef void(VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer *pBuffers, const VkDeviceSize *pOffsets); -typedef void(VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance); -typedef void(VKAPI_PTR 
*PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance); -typedef void(VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void(VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride); -typedef void(VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef void(VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset); -typedef void(VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions); -typedef void(VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy *pRegions); -typedef void(VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit *pRegions, VkFilter filter); -typedef void(VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy *pRegions); -typedef void(VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy *pRegions); -typedef void(VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData); -typedef void(VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data); -typedef void(VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue *pColor, uint32_t rangeCount, const VkImageSubresourceRange *pRanges); -typedef void(VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue *pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange *pRanges); -typedef void(VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment *pAttachments, uint32_t rectCount, const VkClearRect *pRects); -typedef void(VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve *pRegions); -typedef void(VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void(VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask); -typedef void(VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent *pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, 
const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers); -typedef void(VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers); -typedef void(VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags); -typedef void(VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query); -typedef void(VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); -typedef void(VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); -typedef void(VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags); -typedef void(VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void *pValues); -typedef void(VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo *pRenderPassBegin, VkSubpassContents contents); -typedef void(VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents); -typedef void(VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer); -typedef void(VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance( - const VkInstanceCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkInstance *pInstance); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr( - VkInstance instance, - const char *pName); - -VKAPI_ATTR void VKAPI_CALL vkDestroyInstance( - VkInstance instance, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices( - VkInstance instance, - uint32_t *pPhysicalDeviceCount, - VkPhysicalDevice *pPhysicalDevices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures *pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties *pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkImageFormatProperties *pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties *pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties( - VkPhysicalDevice physicalDevice, - uint32_t *pQueueFamilyPropertyCount, - VkQueueFamilyProperties *pQueueFamilyProperties); 
- -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties *pMemoryProperties); - -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr( - VkDevice device, - const char *pName); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice( - VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDevice *pDevice); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDevice( - VkDevice device, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties( - const char *pLayerName, - uint32_t *pPropertyCount, - VkExtensionProperties *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties( - VkPhysicalDevice physicalDevice, - const char *pLayerName, - uint32_t *pPropertyCount, - VkExtensionProperties *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties( - uint32_t *pPropertyCount, - VkLayerProperties *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties( - VkPhysicalDevice physicalDevice, - uint32_t *pPropertyCount, - VkLayerProperties *pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue( - VkDevice device, - uint32_t queueFamilyIndex, - uint32_t queueIndex, - VkQueue *pQueue); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit( - VkQueue queue, - uint32_t submitCount, - const VkSubmitInfo *pSubmits, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle( - VkQueue queue); - -VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle( - VkDevice device); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory( - VkDevice device, - const VkMemoryAllocateInfo *pAllocateInfo, - const VkAllocationCallbacks *pAllocator, - VkDeviceMemory *pMemory); - -VKAPI_ATTR void VKAPI_CALL vkFreeMemory( - VkDevice device, - VkDeviceMemory memory, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize offset, - VkDeviceSize size, - VkMemoryMapFlags flags, - void **ppData); - -VKAPI_ATTR void VKAPI_CALL vkUnmapMemory( - VkDevice device, - VkDeviceMemory memory); - -VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange *pMemoryRanges); - -VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges( - VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange *pMemoryRanges); - -VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment( - VkDevice device, - VkDeviceMemory memory, - VkDeviceSize *pCommittedMemoryInBytes); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory( - VkDevice device, - VkBuffer buffer, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory( - VkDevice device, - VkImage image, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements( - VkDevice device, - VkBuffer buffer, - VkMemoryRequirements *pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements( - VkDevice device, - VkImage image, - VkMemoryRequirements *pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements( - VkDevice device, - VkImage image, - uint32_t *pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements *pSparseMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL 
vkGetPhysicalDeviceSparseImageFormatProperties( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkSampleCountFlagBits samples, - VkImageUsageFlags usage, - VkImageTiling tiling, - uint32_t *pPropertyCount, - VkSparseImageFormatProperties *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse( - VkQueue queue, - uint32_t bindInfoCount, - const VkBindSparseInfo *pBindInfo, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence( - VkDevice device, - const VkFenceCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkFence *pFence); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFence( - VkDevice device, - VkFence fence, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetFences( - VkDevice device, - uint32_t fenceCount, - const VkFence *pFences); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus( - VkDevice device, - VkFence fence); - -VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences( - VkDevice device, - uint32_t fenceCount, - const VkFence *pFences, - VkBool32 waitAll, - uint64_t timeout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore( - VkDevice device, - const VkSemaphoreCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSemaphore *pSemaphore); - -VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore( - VkDevice device, - VkSemaphore semaphore, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent( - VkDevice device, - const VkEventCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkEvent *pEvent); - -VKAPI_ATTR void VKAPI_CALL vkDestroyEvent( - VkDevice device, - VkEvent event, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent( - VkDevice device, - VkEvent event); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool( - VkDevice device, - const VkQueryPoolCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkQueryPool *pQueryPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool( - VkDevice device, - VkQueryPool queryPool, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults( - VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - size_t dataSize, - void *pData, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer( - VkDevice device, - const VkBufferCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkBuffer *pBuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer( - VkDevice device, - VkBuffer buffer, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView( - VkDevice device, - const VkBufferViewCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkBufferView *pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView( - VkDevice device, - VkBufferView bufferView, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage( - VkDevice device, - const VkImageCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkImage *pImage); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImage( - VkDevice device, - VkImage image, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout( - VkDevice device, - VkImage image, 
- const VkImageSubresource *pSubresource, - VkSubresourceLayout *pLayout); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView( - VkDevice device, - const VkImageViewCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkImageView *pView); - -VKAPI_ATTR void VKAPI_CALL vkDestroyImageView( - VkDevice device, - VkImageView imageView, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule( - VkDevice device, - const VkShaderModuleCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkShaderModule *pShaderModule); - -VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule( - VkDevice device, - VkShaderModule shaderModule, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache( - VkDevice device, - const VkPipelineCacheCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkPipelineCache *pPipelineCache); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache( - VkDevice device, - VkPipelineCache pipelineCache, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData( - VkDevice device, - VkPipelineCache pipelineCache, - size_t *pDataSize, - void *pData); - -VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches( - VkDevice device, - VkPipelineCache dstCache, - uint32_t srcCacheCount, - const VkPipelineCache *pSrcCaches); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo *pCreateInfos, - const VkAllocationCallbacks *pAllocator, - VkPipeline *pPipelines); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines( - VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkComputePipelineCreateInfo *pCreateInfos, - const VkAllocationCallbacks *pAllocator, - VkPipeline *pPipelines); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline( - VkDevice device, - VkPipeline pipeline, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout( - VkDevice device, - const VkPipelineLayoutCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkPipelineLayout *pPipelineLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout( - VkDevice device, - VkPipelineLayout pipelineLayout, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler( - VkDevice device, - const VkSamplerCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSampler *pSampler); - -VKAPI_ATTR void VKAPI_CALL vkDestroySampler( - VkDevice device, - VkSampler sampler, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout( - VkDevice device, - const VkDescriptorSetLayoutCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDescriptorSetLayout *pSetLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout( - VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool( - VkDevice device, - const VkDescriptorPoolCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDescriptorPool *pDescriptorPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool( - VkDevice device, - VkDescriptorPool descriptorPool, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool( - VkDevice 
device, - VkDescriptorPool descriptorPool, - VkDescriptorPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets( - VkDevice device, - const VkDescriptorSetAllocateInfo *pAllocateInfo, - VkDescriptorSet *pDescriptorSets); - -VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets( - VkDevice device, - VkDescriptorPool descriptorPool, - uint32_t descriptorSetCount, - const VkDescriptorSet *pDescriptorSets); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets( - VkDevice device, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet *pDescriptorWrites, - uint32_t descriptorCopyCount, - const VkCopyDescriptorSet *pDescriptorCopies); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer( - VkDevice device, - const VkFramebufferCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkFramebuffer *pFramebuffer); - -VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer( - VkDevice device, - VkFramebuffer framebuffer, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass( - VkDevice device, - const VkRenderPassCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkRenderPass *pRenderPass); - -VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass( - VkDevice device, - VkRenderPass renderPass, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity( - VkDevice device, - VkRenderPass renderPass, - VkExtent2D *pGranularity); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool( - VkDevice device, - const VkCommandPoolCreateInfo *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkCommandPool *pCommandPool); - -VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool( - VkDevice device, - VkCommandPool commandPool, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolResetFlags flags); - -VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers( - VkDevice device, - const VkCommandBufferAllocateInfo *pAllocateInfo, - VkCommandBuffer *pCommandBuffers); - -VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers( - VkDevice device, - VkCommandPool commandPool, - uint32_t commandBufferCount, - const VkCommandBuffer *pCommandBuffers); - -VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer( - VkCommandBuffer commandBuffer, - const VkCommandBufferBeginInfo *pBeginInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer( - VkCommandBuffer commandBuffer, - VkCommandBufferResetFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewport *pViewports); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor( - VkCommandBuffer commandBuffer, - uint32_t firstScissor, - uint32_t scissorCount, - const VkRect2D *pScissors); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth( - VkCommandBuffer commandBuffer, - float lineWidth); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias( - VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants( - VkCommandBuffer commandBuffer, - const float blendConstants[4]); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds( 
- VkCommandBuffer commandBuffer, - float minDepthBounds, - float maxDepthBounds); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t compareMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t writeMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference( - VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t reference); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t firstSet, - uint32_t descriptorSetCount, - const VkDescriptorSet *pDescriptorSets, - uint32_t dynamicOffsetCount, - const uint32_t *pDynamicOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkIndexType indexType); - -VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers( - VkCommandBuffer commandBuffer, - uint32_t firstBinding, - uint32_t bindingCount, - const VkBuffer *pBuffers, - const VkDeviceSize *pOffsets); - -VKAPI_ATTR void VKAPI_CALL vkCmdDraw( - VkCommandBuffer commandBuffer, - uint32_t vertexCount, - uint32_t instanceCount, - uint32_t firstVertex, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed( - VkCommandBuffer commandBuffer, - uint32_t indexCount, - uint32_t instanceCount, - uint32_t firstIndex, - int32_t vertexOffset, - uint32_t firstInstance); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - uint32_t drawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatch( - VkCommandBuffer commandBuffer, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferCopy *pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageCopy *pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageBlit *pRegions, - VkFilter filter); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage( - VkCommandBuffer commandBuffer, - VkBuffer srcBuffer, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkBufferImageCopy *pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkBuffer dstBuffer, - uint32_t regionCount, - const VkBufferImageCopy *pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer( - VkCommandBuffer commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize dataSize, - const void *pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer( - VkCommandBuffer 
commandBuffer, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize size, - uint32_t data); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearColorValue *pColor, - uint32_t rangeCount, - const VkImageSubresourceRange *pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage( - VkCommandBuffer commandBuffer, - VkImage image, - VkImageLayout imageLayout, - const VkClearDepthStencilValue *pDepthStencil, - uint32_t rangeCount, - const VkImageSubresourceRange *pRanges); - -VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments( - VkCommandBuffer commandBuffer, - uint32_t attachmentCount, - const VkClearAttachment *pAttachments, - uint32_t rectCount, - const VkClearRect *pRects); - -VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage( - VkCommandBuffer commandBuffer, - VkImage srcImage, - VkImageLayout srcImageLayout, - VkImage dstImage, - VkImageLayout dstImageLayout, - uint32_t regionCount, - const VkImageResolve *pRegions); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents( - VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent *pEvents, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - uint32_t memoryBarrierCount, - const VkMemoryBarrier *pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier *pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier *pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier( - VkCommandBuffer commandBuffer, - VkPipelineStageFlags srcStageMask, - VkPipelineStageFlags dstStageMask, - VkDependencyFlags dependencyFlags, - uint32_t memoryBarrierCount, - const VkMemoryBarrier *pMemoryBarriers, - uint32_t bufferMemoryBarrierCount, - const VkBufferMemoryBarrier *pBufferMemoryBarriers, - uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier *pImageMemoryBarriers); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount); - -VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp( - VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query); - -VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults( - VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount, - VkBuffer dstBuffer, - VkDeviceSize dstOffset, - VkDeviceSize stride, - VkQueryResultFlags flags); - -VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants( - VkCommandBuffer commandBuffer, - VkPipelineLayout layout, - VkShaderStageFlags stageFlags, - uint32_t offset, - uint32_t size, - const void *pValues); - -VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass( - VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo *pRenderPassBegin, - VkSubpassContents contents); - -VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass( - VkCommandBuffer commandBuffer, - VkSubpassContents contents); - 
-VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands( - VkCommandBuffer commandBuffer, - uint32_t commandBufferCount, - const VkCommandBuffer *pCommandBuffers); -#endif - -#define VK_KHR_surface 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR) - -#define VK_KHR_SURFACE_SPEC_VERSION 25 -#define VK_KHR_SURFACE_EXTENSION_NAME "VK_KHR_surface" -#define VK_COLORSPACE_SRGB_NONLINEAR_KHR VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - -typedef enum VkColorSpaceKHR { - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0, - VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001, - VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002, - VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = 1000104003, - VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004, - VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005, - VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006, - VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007, - VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008, - VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009, - VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010, - VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011, - VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012, - VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013, - VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014, - VK_COLOR_SPACE_BEGIN_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_END_RANGE_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR, - VK_COLOR_SPACE_RANGE_SIZE_KHR = (VK_COLOR_SPACE_SRGB_NONLINEAR_KHR - VK_COLOR_SPACE_SRGB_NONLINEAR_KHR + 1), // NOLINT: misc-redundant-expression - VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkColorSpaceKHR; - -typedef enum VkPresentModeKHR { - VK_PRESENT_MODE_IMMEDIATE_KHR = 0, - VK_PRESENT_MODE_MAILBOX_KHR = 1, - VK_PRESENT_MODE_FIFO_KHR = 2, - VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3, - VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000, - VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001, - VK_PRESENT_MODE_BEGIN_RANGE_KHR = VK_PRESENT_MODE_IMMEDIATE_KHR, - VK_PRESENT_MODE_END_RANGE_KHR = VK_PRESENT_MODE_FIFO_RELAXED_KHR, - VK_PRESENT_MODE_RANGE_SIZE_KHR = (VK_PRESENT_MODE_FIFO_RELAXED_KHR - VK_PRESENT_MODE_IMMEDIATE_KHR + 1), // NOLINT: misc-redundant-expression - VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkPresentModeKHR; - -typedef enum VkSurfaceTransformFlagBitsKHR { - VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001, - VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002, - VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004, - VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040, - VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080, - VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100, - VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSurfaceTransformFlagBitsKHR; -typedef VkFlags VkSurfaceTransformFlagsKHR; - -typedef enum VkCompositeAlphaFlagBitsKHR { - VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002, - VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004, - VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008, - VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkCompositeAlphaFlagBitsKHR; -typedef VkFlags VkCompositeAlphaFlagsKHR; - -typedef struct VkSurfaceCapabilitiesKHR { - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - 
VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; -} VkSurfaceCapabilitiesKHR; - -typedef struct VkSurfaceFormatKHR { - VkFormat format; - VkColorSpaceKHR colorSpace; -} VkSurfaceFormatKHR; - -typedef void(VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 *pSupported); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pSurfaceFormatCount, VkSurfaceFormatKHR *pSurfaceFormats); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pPresentModeCount, VkPresentModeKHR *pPresentModes); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR( - VkInstance instance, - VkSurfaceKHR surface, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR( - VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, - VkSurfaceKHR surface, - VkBool32 *pSupported); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR *pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t *pSurfaceFormatCount, - VkSurfaceFormatKHR *pSurfaceFormats); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t *pPresentModeCount, - VkPresentModeKHR *pPresentModes); -#endif - -#define VK_KHR_swapchain 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR) - -#define VK_KHR_SWAPCHAIN_SPEC_VERSION 68 -#define VK_KHR_SWAPCHAIN_EXTENSION_NAME "VK_KHR_swapchain" - -typedef enum VkSwapchainCreateFlagBitsKHR { - VK_SWAPCHAIN_CREATE_BIND_SFR_BIT_KHX = 0x00000001, - VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSwapchainCreateFlagBitsKHR; -typedef VkFlags VkSwapchainCreateFlagsKHR; - -typedef struct VkSwapchainCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkSwapchainCreateFlagsKHR flags; - VkSurfaceKHR surface; - uint32_t minImageCount; - VkFormat imageFormat; - VkColorSpaceKHR imageColorSpace; - VkExtent2D imageExtent; - uint32_t imageArrayLayers; - VkImageUsageFlags imageUsage; - VkSharingMode imageSharingMode; - uint32_t queueFamilyIndexCount; - const uint32_t *pQueueFamilyIndices; - VkSurfaceTransformFlagBitsKHR preTransform; - VkCompositeAlphaFlagBitsKHR compositeAlpha; - VkPresentModeKHR presentMode; - VkBool32 clipped; - VkSwapchainKHR oldSwapchain; -} VkSwapchainCreateInfoKHR; - -typedef struct VkPresentInfoKHR { - VkStructureType sType; - const void *pNext; - uint32_t waitSemaphoreCount; - const VkSemaphore *pWaitSemaphores; - uint32_t swapchainCount; - const VkSwapchainKHR *pSwapchains; - const uint32_t 
*pImageIndices; - VkResult *pResults; -} VkPresentInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain); -typedef void(VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages); -typedef VkResult(VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex); -typedef VkResult(VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR *pPresentInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR( - VkDevice device, - const VkSwapchainCreateInfoKHR *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSwapchainKHR *pSwapchain); - -VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR( - VkDevice device, - VkSwapchainKHR swapchain, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t *pSwapchainImageCount, - VkImage *pSwapchainImages); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR( - VkDevice device, - VkSwapchainKHR swapchain, - uint64_t timeout, - VkSemaphore semaphore, - VkFence fence, - uint32_t *pImageIndex); - -VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR( - VkQueue queue, - const VkPresentInfoKHR *pPresentInfo); -#endif - -#define VK_KHR_display 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR) - -#define VK_KHR_DISPLAY_SPEC_VERSION 21 -#define VK_KHR_DISPLAY_EXTENSION_NAME "VK_KHR_display" - -typedef enum VkDisplayPlaneAlphaFlagBitsKHR { - VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001, - VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004, - VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008, - VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDisplayPlaneAlphaFlagBitsKHR; -typedef VkFlags VkDisplayPlaneAlphaFlagsKHR; -typedef VkFlags VkDisplayModeCreateFlagsKHR; -typedef VkFlags VkDisplaySurfaceCreateFlagsKHR; - -typedef struct VkDisplayPropertiesKHR { - VkDisplayKHR display; - const char *displayName; - VkExtent2D physicalDimensions; - VkExtent2D physicalResolution; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkBool32 planeReorderPossible; - VkBool32 persistentContent; -} VkDisplayPropertiesKHR; - -typedef struct VkDisplayModeParametersKHR { - VkExtent2D visibleRegion; - uint32_t refreshRate; -} VkDisplayModeParametersKHR; - -typedef struct VkDisplayModePropertiesKHR { - VkDisplayModeKHR displayMode; - VkDisplayModeParametersKHR parameters; -} VkDisplayModePropertiesKHR; - -typedef struct VkDisplayModeCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkDisplayModeCreateFlagsKHR flags; - VkDisplayModeParametersKHR parameters; -} VkDisplayModeCreateInfoKHR; - -typedef struct VkDisplayPlaneCapabilitiesKHR { - VkDisplayPlaneAlphaFlagsKHR supportedAlpha; - VkOffset2D minSrcPosition; - VkOffset2D maxSrcPosition; - VkExtent2D minSrcExtent; - VkExtent2D maxSrcExtent; - VkOffset2D minDstPosition; - VkOffset2D maxDstPosition; - VkExtent2D minDstExtent; - VkExtent2D maxDstExtent; -} 
VkDisplayPlaneCapabilitiesKHR; - -typedef struct VkDisplayPlanePropertiesKHR { - VkDisplayKHR currentDisplay; - uint32_t currentStackIndex; -} VkDisplayPlanePropertiesKHR; - -typedef struct VkDisplaySurfaceCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkDisplaySurfaceCreateFlagsKHR flags; - VkDisplayModeKHR displayMode; - uint32_t planeIndex; - uint32_t planeStackIndex; - VkSurfaceTransformFlagBitsKHR transform; - float globalAlpha; - VkDisplayPlaneAlphaFlagBitsKHR alphaMode; - VkExtent2D imageExtent; -} VkDisplaySurfaceCreateInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPropertiesKHR *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkDisplayPlanePropertiesKHR *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t *pDisplayCount, VkDisplayKHR *pDisplays); -typedef VkResult(VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t *pPropertyCount, VkDisplayModePropertiesKHR *pProperties); -typedef VkResult(VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDisplayModeKHR *pMode); -typedef VkResult(VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR *pCapabilities); -typedef VkResult(VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t *pPropertyCount, - VkDisplayPropertiesKHR *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR( - VkPhysicalDevice physicalDevice, - uint32_t *pPropertyCount, - VkDisplayPlanePropertiesKHR *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR( - VkPhysicalDevice physicalDevice, - uint32_t planeIndex, - uint32_t *pDisplayCount, - VkDisplayKHR *pDisplays); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - uint32_t *pPropertyCount, - VkDisplayModePropertiesKHR *pProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR( - VkPhysicalDevice physicalDevice, - VkDisplayKHR display, - const VkDisplayModeCreateInfoKHR *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDisplayModeKHR *pMode); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR( - VkPhysicalDevice physicalDevice, - VkDisplayModeKHR mode, - uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR *pCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR( - VkInstance instance, - const VkDisplaySurfaceCreateInfoKHR *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSurfaceKHR *pSurface); -#endif - -#define VK_KHR_display_swapchain 1 -#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 9 -#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain" - -typedef struct VkDisplayPresentInfoKHR { - VkStructureType 
sType; - const void *pNext; - VkRect2D srcRect; - VkRect2D dstRect; - VkBool32 persistent; -} VkDisplayPresentInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR *pCreateInfos, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchains); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainCreateInfoKHR *pCreateInfos, - const VkAllocationCallbacks *pAllocator, - VkSwapchainKHR *pSwapchains); -#endif - -#define VK_KHR_sampler_mirror_clamp_to_edge 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 1 -#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge" - -#define VK_KHR_get_physical_device_properties2 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2" - -typedef struct VkPhysicalDeviceFeatures2KHR { - VkStructureType sType; - void *pNext; - VkPhysicalDeviceFeatures features; -} VkPhysicalDeviceFeatures2KHR; - -typedef struct VkPhysicalDeviceProperties2KHR { - VkStructureType sType; - void *pNext; - VkPhysicalDeviceProperties properties; -} VkPhysicalDeviceProperties2KHR; - -typedef struct VkFormatProperties2KHR { - VkStructureType sType; - void *pNext; - VkFormatProperties formatProperties; -} VkFormatProperties2KHR; - -typedef struct VkImageFormatProperties2KHR { - VkStructureType sType; - void *pNext; - VkImageFormatProperties imageFormatProperties; -} VkImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceImageFormatInfo2KHR { - VkStructureType sType; - const void *pNext; - VkFormat format; - VkImageType type; - VkImageTiling tiling; - VkImageUsageFlags usage; - VkImageCreateFlags flags; -} VkPhysicalDeviceImageFormatInfo2KHR; - -typedef struct VkQueueFamilyProperties2KHR { - VkStructureType sType; - void *pNext; - VkQueueFamilyProperties queueFamilyProperties; -} VkQueueFamilyProperties2KHR; - -typedef struct VkPhysicalDeviceMemoryProperties2KHR { - VkStructureType sType; - void *pNext; - VkPhysicalDeviceMemoryProperties memoryProperties; -} VkPhysicalDeviceMemoryProperties2KHR; - -typedef struct VkSparseImageFormatProperties2KHR { - VkStructureType sType; - void *pNext; - VkSparseImageFormatProperties properties; -} VkSparseImageFormatProperties2KHR; - -typedef struct VkPhysicalDeviceSparseImageFormatInfo2KHR { - VkStructureType sType; - const void *pNext; - VkFormat format; - VkImageType type; - VkSampleCountFlagBits samples; - VkImageUsageFlags usage; - VkImageTiling tiling; -} VkPhysicalDeviceSparseImageFormatInfo2KHR; - -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2KHR *pFeatures); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2KHR *pProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR *pFormatProperties); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2KHR *pImageFormatInfo, VkImageFormatProperties2KHR *pImageFormatProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t 
*pQueueFamilyPropertyCount, VkQueueFamilyProperties2KHR *pQueueFamilyProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2KHR *pMemoryProperties); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2KHR *pFormatInfo, uint32_t *pPropertyCount, VkSparseImageFormatProperties2KHR *pProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2KHR *pFeatures); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2KHR *pProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties2KHR *pFormatProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2KHR *pImageFormatInfo, - VkImageFormatProperties2KHR *pImageFormatProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR( - VkPhysicalDevice physicalDevice, - uint32_t *pQueueFamilyPropertyCount, - VkQueueFamilyProperties2KHR *pQueueFamilyProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR( - VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2KHR *pMemoryProperties); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2KHR *pFormatInfo, - uint32_t *pPropertyCount, - VkSparseImageFormatProperties2KHR *pProperties); -#endif - -#define VK_KHR_shader_draw_parameters 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1 -#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters" - -#define VK_KHR_maintenance1 1 -#define VK_KHR_MAINTENANCE1_SPEC_VERSION 1 -#define VK_KHR_MAINTENANCE1_EXTENSION_NAME "VK_KHR_maintenance1" - -typedef VkFlags VkCommandPoolTrimFlagsKHR; - -typedef void(VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlagsKHR flags); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlagsKHR flags); -#endif - -#define VK_KHR_external_memory_capabilities 1 -#define VK_LUID_SIZE_KHR 8 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities" - -typedef enum VkExternalMemoryHandleTypeFlagBitsKHR { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = 0x00000010, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = 0x00000020, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = 0x00000040, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalMemoryHandleTypeFlagsKHR; - -typedef enum VkExternalMemoryFeatureFlagBitsKHR { - 
VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsKHR; -typedef VkFlags VkExternalMemoryFeatureFlagsKHR; - -typedef struct VkExternalMemoryPropertiesKHR { - VkExternalMemoryFeatureFlagsKHR externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsKHR compatibleHandleTypes; -} VkExternalMemoryPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalImageFormatInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalImageFormatInfoKHR; - -typedef struct VkExternalImageFormatPropertiesKHR { - VkStructureType sType; - void *pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalImageFormatPropertiesKHR; - -typedef struct VkPhysicalDeviceExternalBufferInfoKHR { - VkStructureType sType; - const void *pNext; - VkBufferCreateFlags flags; - VkBufferUsageFlags usage; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalBufferInfoKHR; - -typedef struct VkExternalBufferPropertiesKHR { - VkStructureType sType; - void *pNext; - VkExternalMemoryPropertiesKHR externalMemoryProperties; -} VkExternalBufferPropertiesKHR; - -typedef struct VkPhysicalDeviceIDPropertiesKHR { - VkStructureType sType; - void *pNext; - uint8_t deviceUUID[VK_UUID_SIZE]; - uint8_t driverUUID[VK_UUID_SIZE]; - uint8_t deviceLUID[VK_LUID_SIZE_KHR]; - uint32_t deviceNodeMask; - VkBool32 deviceLUIDValid; -} VkPhysicalDeviceIDPropertiesKHR; - -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfoKHR *pExternalBufferInfo, VkExternalBufferPropertiesKHR *pExternalBufferProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfoKHR *pExternalBufferInfo, - VkExternalBufferPropertiesKHR *pExternalBufferProperties); -#endif - -#define VK_KHR_external_memory 1 -#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory" -#define VK_QUEUE_FAMILY_EXTERNAL_KHR (~0U - 1) - -typedef struct VkExternalMemoryImageCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryImageCreateInfoKHR; - -typedef struct VkExternalMemoryBufferCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExternalMemoryBufferCreateInfoKHR; - -typedef struct VkExportMemoryAllocateInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagsKHR handleTypes; -} VkExportMemoryAllocateInfoKHR; - -#define VK_KHR_external_memory_fd 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd" - -typedef struct VkImportMemoryFdInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportMemoryFdInfoKHR; - -typedef struct VkMemoryFdPropertiesKHR { - VkStructureType sType; - void *pNext; - uint32_t memoryTypeBits; -} VkMemoryFdPropertiesKHR; - -typedef struct VkMemoryGetFdInfoKHR { - 
VkStructureType sType; - const void *pNext; - VkDeviceMemory memory; - VkExternalMemoryHandleTypeFlagBitsKHR handleType; -} VkMemoryGetFdInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR *pGetFdInfo, int *pFd); -typedef VkResult(VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBitsKHR handleType, int fd, VkMemoryFdPropertiesKHR *pMemoryFdProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR( - VkDevice device, - const VkMemoryGetFdInfoKHR *pGetFdInfo, - int *pFd); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR( - VkDevice device, - VkExternalMemoryHandleTypeFlagBitsKHR handleType, - int fd, - VkMemoryFdPropertiesKHR *pMemoryFdProperties); -#endif - -#define VK_KHR_external_semaphore_capabilities 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities" - -typedef enum VkExternalSemaphoreHandleTypeFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = 0x00000008, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000010, - VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreHandleTypeFlagsKHR; - -typedef enum VkExternalSemaphoreFeatureFlagBitsKHR { - VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalSemaphoreFeatureFlagBitsKHR; -typedef VkFlags VkExternalSemaphoreFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalSemaphoreInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalSemaphoreInfoKHR; - -typedef struct VkExternalSemaphorePropertiesKHR { - VkStructureType sType; - void *pNext; - VkExternalSemaphoreHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalSemaphoreHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalSemaphoreFeatureFlagsKHR externalSemaphoreFeatures; -} VkExternalSemaphorePropertiesKHR; - -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfoKHR *pExternalSemaphoreInfo, VkExternalSemaphorePropertiesKHR *pExternalSemaphoreProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfoKHR *pExternalSemaphoreInfo, - VkExternalSemaphorePropertiesKHR *pExternalSemaphoreProperties); -#endif - -#define VK_KHR_external_semaphore 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore" - -typedef enum VkSemaphoreImportFlagBitsKHR { - VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkSemaphoreImportFlagBitsKHR; -typedef VkFlags VkSemaphoreImportFlagsKHR; - -typedef struct VkExportSemaphoreCreateInfoKHR { - VkStructureType sType; - const void *pNext; - 
VkExternalSemaphoreHandleTypeFlagsKHR handleTypes; -} VkExportSemaphoreCreateInfoKHR; - -#define VK_KHR_external_semaphore_fd 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd" - -typedef struct VkImportSemaphoreFdInfoKHR { - VkStructureType sType; - const void *pNext; - VkSemaphore semaphore; - VkSemaphoreImportFlagsKHR flags; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportSemaphoreFdInfoKHR; - -typedef struct VkSemaphoreGetFdInfoKHR { - VkStructureType sType; - const void *pNext; - VkSemaphore semaphore; - VkExternalSemaphoreHandleTypeFlagBitsKHR handleType; -} VkSemaphoreGetFdInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo); -typedef VkResult(VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR *pGetFdInfo, int *pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR( - VkDevice device, - const VkImportSemaphoreFdInfoKHR *pImportSemaphoreFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR( - VkDevice device, - const VkSemaphoreGetFdInfoKHR *pGetFdInfo, - int *pFd); -#endif - -#define VK_KHR_push_descriptor 1 -#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 1 -#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor" - -typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR { - VkStructureType sType; - void *pNext; - uint32_t maxPushDescriptors; -} VkPhysicalDevicePushDescriptorPropertiesKHR; - -typedef void(VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet *pDescriptorWrites); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR( - VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipelineLayout layout, - uint32_t set, - uint32_t descriptorWriteCount, - const VkWriteDescriptorSet *pDescriptorWrites); -#endif - -#define VK_KHR_16bit_storage 1 -#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1 -#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage" - -typedef struct VkPhysicalDevice16BitStorageFeaturesKHR { - VkStructureType sType; - void *pNext; - VkBool32 storageBuffer16BitAccess; - VkBool32 uniformAndStorageBuffer16BitAccess; - VkBool32 storagePushConstant16; - VkBool32 storageInputOutput16; -} VkPhysicalDevice16BitStorageFeaturesKHR; - -#define VK_KHR_incremental_present 1 -#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 1 -#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present" - -typedef struct VkRectLayerKHR { - VkOffset2D offset; - VkExtent2D extent; - uint32_t layer; -} VkRectLayerKHR; - -typedef struct VkPresentRegionKHR { - uint32_t rectangleCount; - const VkRectLayerKHR *pRectangles; -} VkPresentRegionKHR; - -typedef struct VkPresentRegionsKHR { - VkStructureType sType; - const void *pNext; - uint32_t swapchainCount; - const VkPresentRegionKHR *pRegions; -} VkPresentRegionsKHR; - -#define VK_KHR_descriptor_update_template 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplateKHR) - -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1 -#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template" - -typedef enum VkDescriptorUpdateTemplateTypeKHR { - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR 
= 0, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_BEGIN_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_END_RANGE_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR, - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_RANGE_SIZE_KHR = (VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR + 1), // NOLINT: misc-redundant-expression - VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF -} VkDescriptorUpdateTemplateTypeKHR; - -typedef VkFlags VkDescriptorUpdateTemplateCreateFlagsKHR; - -typedef struct VkDescriptorUpdateTemplateEntryKHR { - uint32_t dstBinding; - uint32_t dstArrayElement; - uint32_t descriptorCount; - VkDescriptorType descriptorType; - size_t offset; - size_t stride; -} VkDescriptorUpdateTemplateEntryKHR; - -typedef struct VkDescriptorUpdateTemplateCreateInfoKHR { - VkStructureType sType; - void *pNext; - VkDescriptorUpdateTemplateCreateFlagsKHR flags; - uint32_t descriptorUpdateEntryCount; - const VkDescriptorUpdateTemplateEntryKHR *pDescriptorUpdateEntries; - VkDescriptorUpdateTemplateTypeKHR templateType; - VkDescriptorSetLayout descriptorSetLayout; - VkPipelineBindPoint pipelineBindPoint; - VkPipelineLayout pipelineLayout; - uint32_t set; -} VkDescriptorUpdateTemplateCreateInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate); -typedef void(VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const VkAllocationCallbacks *pAllocator); -typedef void(VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, const void *pData); -typedef void(VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void *pData); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR( - VkDevice device, - const VkDescriptorUpdateTemplateCreateInfoKHR *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDescriptorUpdateTemplateKHR *pDescriptorUpdateTemplate); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR( - VkDevice device, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR( - VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - const void *pData); - -VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR( - VkCommandBuffer commandBuffer, - VkDescriptorUpdateTemplateKHR descriptorUpdateTemplate, - VkPipelineLayout layout, - uint32_t set, - const void *pData); -#endif - -#define VK_KHR_shared_presentable_image 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1 -#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image" - -typedef struct VkSharedPresentSurfaceCapabilitiesKHR { - VkStructureType sType; - void *pNext; - VkImageUsageFlags sharedPresentSupportedUsageFlags; -} VkSharedPresentSurfaceCapabilitiesKHR; - -typedef VkResult(VKAPI_PTR 
*PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR( - VkDevice device, - VkSwapchainKHR swapchain); -#endif - -#define VK_KHR_external_fence_capabilities 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities" - -typedef enum VkExternalFenceHandleTypeFlagBitsKHR { - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = 0x00000004, - VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = 0x00000008, - VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceHandleTypeFlagBitsKHR; -typedef VkFlags VkExternalFenceHandleTypeFlagsKHR; - -typedef enum VkExternalFenceFeatureFlagBitsKHR { - VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = 0x00000001, - VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = 0x00000002, - VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkExternalFenceFeatureFlagBitsKHR; -typedef VkFlags VkExternalFenceFeatureFlagsKHR; - -typedef struct VkPhysicalDeviceExternalFenceInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkPhysicalDeviceExternalFenceInfoKHR; - -typedef struct VkExternalFencePropertiesKHR { - VkStructureType sType; - void *pNext; - VkExternalFenceHandleTypeFlagsKHR exportFromImportedHandleTypes; - VkExternalFenceHandleTypeFlagsKHR compatibleHandleTypes; - VkExternalFenceFeatureFlagsKHR externalFenceFeatures; -} VkExternalFencePropertiesKHR; - -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfoKHR *pExternalFenceInfo, VkExternalFencePropertiesKHR *pExternalFenceProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfoKHR *pExternalFenceInfo, - VkExternalFencePropertiesKHR *pExternalFenceProperties); -#endif - -#define VK_KHR_external_fence 1 -#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence" - -typedef enum VkFenceImportFlagBitsKHR { - VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = 0x00000001, - VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF -} VkFenceImportFlagBitsKHR; -typedef VkFlags VkFenceImportFlagsKHR; - -typedef struct VkExportFenceCreateInfoKHR { - VkStructureType sType; - const void *pNext; - VkExternalFenceHandleTypeFlagsKHR handleTypes; -} VkExportFenceCreateInfoKHR; - -#define VK_KHR_external_fence_fd 1 -#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1 -#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd" - -typedef struct VkImportFenceFdInfoKHR { - VkStructureType sType; - const void *pNext; - VkFence fence; - VkFenceImportFlagsKHR flags; - VkExternalFenceHandleTypeFlagBitsKHR handleType; - int fd; -} VkImportFenceFdInfoKHR; - -typedef struct VkFenceGetFdInfoKHR { - VkStructureType sType; - const void *pNext; - VkFence fence; - VkExternalFenceHandleTypeFlagBitsKHR handleType; -} VkFenceGetFdInfoKHR; - -typedef VkResult(VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR *pImportFenceFdInfo); -typedef VkResult(VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR 
*pGetFdInfo, int *pFd); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR( - VkDevice device, - const VkImportFenceFdInfoKHR *pImportFenceFdInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR( - VkDevice device, - const VkFenceGetFdInfoKHR *pGetFdInfo, - int *pFd); -#endif - -#define VK_KHR_get_surface_capabilities2 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1 -#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2" - -typedef struct VkPhysicalDeviceSurfaceInfo2KHR { - VkStructureType sType; - const void *pNext; - VkSurfaceKHR surface; -} VkPhysicalDeviceSurfaceInfo2KHR; - -typedef struct VkSurfaceCapabilities2KHR { - VkStructureType sType; - void *pNext; - VkSurfaceCapabilitiesKHR surfaceCapabilities; -} VkSurfaceCapabilities2KHR; - -typedef struct VkSurfaceFormat2KHR { - VkStructureType sType; - void *pNext; - VkSurfaceFormatKHR surfaceFormat; -} VkSurfaceFormat2KHR; - -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, VkSurfaceCapabilities2KHR *pSurfaceCapabilities); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, uint32_t *pSurfaceFormatCount, VkSurfaceFormat2KHR *pSurfaceFormats); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - VkSurfaceCapabilities2KHR *pSurfaceCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR *pSurfaceInfo, - uint32_t *pSurfaceFormatCount, - VkSurfaceFormat2KHR *pSurfaceFormats); -#endif - -#define VK_KHR_variable_pointers 1 -#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1 -#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers" - -typedef struct VkPhysicalDeviceVariablePointerFeaturesKHR { - VkStructureType sType; - void *pNext; - VkBool32 variablePointersStorageBuffer; - VkBool32 variablePointers; -} VkPhysicalDeviceVariablePointerFeaturesKHR; - -#define VK_KHR_dedicated_allocation 1 -#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3 -#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation" - -typedef struct VkMemoryDedicatedRequirementsKHR { - VkStructureType sType; - void *pNext; - VkBool32 prefersDedicatedAllocation; - VkBool32 requiresDedicatedAllocation; -} VkMemoryDedicatedRequirementsKHR; - -typedef struct VkMemoryDedicatedAllocateInfoKHR { - VkStructureType sType; - const void *pNext; - VkImage image; - VkBuffer buffer; -} VkMemoryDedicatedAllocateInfoKHR; - -#define VK_KHR_storage_buffer_storage_class 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1 -#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class" - -#define VK_KHR_relaxed_block_layout 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1 -#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout" - -#define VK_KHR_get_memory_requirements2 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1 -#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2" - -typedef struct VkBufferMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void *pNext; - VkBuffer buffer; -} 
VkBufferMemoryRequirementsInfo2KHR; - -typedef struct VkImageMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void *pNext; - VkImage image; -} VkImageMemoryRequirementsInfo2KHR; - -typedef struct VkImageSparseMemoryRequirementsInfo2KHR { - VkStructureType sType; - const void *pNext; - VkImage image; -} VkImageSparseMemoryRequirementsInfo2KHR; - -typedef struct VkMemoryRequirements2KHR { - VkStructureType sType; - void *pNext; - VkMemoryRequirements memoryRequirements; -} VkMemoryRequirements2KHR; - -typedef struct VkSparseImageMemoryRequirements2KHR { - VkStructureType sType; - void *pNext; - VkSparseImageMemoryRequirements memoryRequirements; -} VkSparseImageMemoryRequirements2KHR; - -typedef void(VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2KHR *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements); -typedef void(VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2KHR *pInfo, VkMemoryRequirements2KHR *pMemoryRequirements); -typedef void(VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, uint32_t *pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR( - VkDevice device, - const VkImageMemoryRequirementsInfo2KHR *pInfo, - VkMemoryRequirements2KHR *pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR( - VkDevice device, - const VkBufferMemoryRequirementsInfo2KHR *pInfo, - VkMemoryRequirements2KHR *pMemoryRequirements); - -VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR( - VkDevice device, - const VkImageSparseMemoryRequirementsInfo2KHR *pInfo, - uint32_t *pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2KHR *pSparseMemoryRequirements); -#endif - -#define VK_EXT_debug_report 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT) - -#define VK_EXT_DEBUG_REPORT_SPEC_VERSION 8 -#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report" -#define VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT -#define VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT - -typedef enum VkDebugReportObjectTypeEXT { - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0, - VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1, - VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3, - VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4, - VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6, - VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7, - VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10, - VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11, - VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12, - VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13, - VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14, - VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17, - VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18, - VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20, - VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21, - 
VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23, - VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24, - VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25, - VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26, - VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27, - VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29, - VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30, - VK_DEBUG_REPORT_OBJECT_TYPE_OBJECT_TABLE_NVX_EXT = 31, - VK_DEBUG_REPORT_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NVX_EXT = 32, - VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = 33, - VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = 1000085000, - VK_DEBUG_REPORT_OBJECT_TYPE_BEGIN_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_END_RANGE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT, - VK_DEBUG_REPORT_OBJECT_TYPE_RANGE_SIZE_EXT = (VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT - VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT + 1), // NOLINT: misc-redundant-expression - VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportObjectTypeEXT; - -typedef enum VkDebugReportFlagBitsEXT { - VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001, - VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002, - VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004, - VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008, - VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010, - VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDebugReportFlagBitsEXT; -typedef VkFlags VkDebugReportFlagsEXT; - -typedef VkBool32(VKAPI_PTR *PFN_vkDebugReportCallbackEXT)( - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char *pLayerPrefix, - const char *pMessage, - void *pUserData); - -typedef struct VkDebugReportCallbackCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkDebugReportFlagsEXT flags; - PFN_vkDebugReportCallbackEXT pfnCallback; - void *pUserData; -} VkDebugReportCallbackCreateInfoEXT; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDebugReportCallbackEXT *pCallback); -typedef void(VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks *pAllocator); -typedef void(VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char *pLayerPrefix, const char *pMessage); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT( - VkInstance instance, - const VkDebugReportCallbackCreateInfoEXT *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkDebugReportCallbackEXT *pCallback); - -VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT( - VkInstance instance, - VkDebugReportFlagsEXT flags, - VkDebugReportObjectTypeEXT objectType, - uint64_t object, - size_t location, - int32_t messageCode, - const char *pLayerPrefix, - const char *pMessage); -#endif - -#define VK_NV_glsl_shader 1 -#define VK_NV_GLSL_SHADER_SPEC_VERSION 1 -#define 
VK_NV_GLSL_SHADER_EXTENSION_NAME "VK_NV_glsl_shader" - -#define VK_EXT_depth_range_unrestricted 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1 -#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted" - -#define VK_IMG_filter_cubic 1 -#define VK_IMG_FILTER_CUBIC_SPEC_VERSION 1 -#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic" - -#define VK_AMD_rasterization_order 1 -#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1 -#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order" - -typedef enum VkRasterizationOrderAMD { - VK_RASTERIZATION_ORDER_STRICT_AMD = 0, - VK_RASTERIZATION_ORDER_RELAXED_AMD = 1, - VK_RASTERIZATION_ORDER_BEGIN_RANGE_AMD = VK_RASTERIZATION_ORDER_STRICT_AMD, - VK_RASTERIZATION_ORDER_END_RANGE_AMD = VK_RASTERIZATION_ORDER_RELAXED_AMD, - VK_RASTERIZATION_ORDER_RANGE_SIZE_AMD = (VK_RASTERIZATION_ORDER_RELAXED_AMD - VK_RASTERIZATION_ORDER_STRICT_AMD + 1), // NOLINT: misc-redundant-expression - VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF -} VkRasterizationOrderAMD; - -typedef struct VkPipelineRasterizationStateRasterizationOrderAMD { - VkStructureType sType; - const void *pNext; - VkRasterizationOrderAMD rasterizationOrder; -} VkPipelineRasterizationStateRasterizationOrderAMD; - -#define VK_AMD_shader_trinary_minmax 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1 -#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax" - -#define VK_AMD_shader_explicit_vertex_parameter 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1 -#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter" - -#define VK_EXT_debug_marker 1 -#define VK_EXT_DEBUG_MARKER_SPEC_VERSION 4 -#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker" - -typedef struct VkDebugMarkerObjectNameInfoEXT { - VkStructureType sType; - const void *pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - const char *pObjectName; -} VkDebugMarkerObjectNameInfoEXT; - -typedef struct VkDebugMarkerObjectTagInfoEXT { - VkStructureType sType; - const void *pNext; - VkDebugReportObjectTypeEXT objectType; - uint64_t object; - uint64_t tagName; - size_t tagSize; - const void *pTag; -} VkDebugMarkerObjectTagInfoEXT; - -typedef struct VkDebugMarkerMarkerInfoEXT { - VkStructureType sType; - const void *pNext; - const char *pMarkerName; - float color[4]; -} VkDebugMarkerMarkerInfoEXT; - -typedef VkResult(VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT *pTagInfo); -typedef VkResult(VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT *pNameInfo); -typedef void(VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT *pMarkerInfo); -typedef void(VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer); -typedef void(VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT *pMarkerInfo); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT( - VkDevice device, - const VkDebugMarkerObjectTagInfoEXT *pTagInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT( - VkDevice device, - const VkDebugMarkerObjectNameInfoEXT *pNameInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT *pMarkerInfo); - -VKAPI_ATTR void VKAPI_CALL 
vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer); - -VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT *pMarkerInfo); -#endif - -#define VK_AMD_gcn_shader 1 -#define VK_AMD_GCN_SHADER_SPEC_VERSION 1 -#define VK_AMD_GCN_SHADER_EXTENSION_NAME "VK_AMD_gcn_shader" - -#define VK_NV_dedicated_allocation 1 -#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1 -#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation" - -typedef struct VkDedicatedAllocationImageCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationImageCreateInfoNV; - -typedef struct VkDedicatedAllocationBufferCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkBool32 dedicatedAllocation; -} VkDedicatedAllocationBufferCreateInfoNV; - -typedef struct VkDedicatedAllocationMemoryAllocateInfoNV { - VkStructureType sType; - const void *pNext; - VkImage image; - VkBuffer buffer; -} VkDedicatedAllocationMemoryAllocateInfoNV; - -#define VK_AMD_draw_indirect_count 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 1 -#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count" - -typedef void(VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); -typedef void(VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); - -VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD( - VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkBuffer countBuffer, - VkDeviceSize countBufferOffset, - uint32_t maxDrawCount, - uint32_t stride); -#endif - -#define VK_AMD_negative_viewport_height 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1 -#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height" - -#define VK_AMD_gpu_shader_half_float 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float" - -#define VK_AMD_shader_ballot 1 -#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1 -#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot" - -#define VK_AMD_texture_gather_bias_lod 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1 -#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod" - -typedef struct VkTextureLODGatherFormatPropertiesAMD { - VkStructureType sType; - void *pNext; - VkBool32 supportsTextureGatherLODBiasAMD; -} VkTextureLODGatherFormatPropertiesAMD; - -#define VK_KHX_multiview 1 -#define VK_KHX_MULTIVIEW_SPEC_VERSION 1 -#define VK_KHX_MULTIVIEW_EXTENSION_NAME "VK_KHX_multiview" - -typedef struct VkRenderPassMultiviewCreateInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t subpassCount; - const uint32_t *pViewMasks; - uint32_t dependencyCount; - const int32_t *pViewOffsets; - uint32_t correlationMaskCount; - const uint32_t *pCorrelationMasks; -} VkRenderPassMultiviewCreateInfoKHX; - -typedef 
struct VkPhysicalDeviceMultiviewFeaturesKHX { - VkStructureType sType; - void *pNext; - VkBool32 multiview; - VkBool32 multiviewGeometryShader; - VkBool32 multiviewTessellationShader; -} VkPhysicalDeviceMultiviewFeaturesKHX; - -typedef struct VkPhysicalDeviceMultiviewPropertiesKHX { - VkStructureType sType; - void *pNext; - uint32_t maxMultiviewViewCount; - uint32_t maxMultiviewInstanceIndex; -} VkPhysicalDeviceMultiviewPropertiesKHX; - -#define VK_IMG_format_pvrtc 1 -#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION 1 -#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc" - -#define VK_NV_external_memory_capabilities 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities" - -typedef enum VkExternalMemoryHandleTypeFlagBitsNV { - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008, - VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryHandleTypeFlagBitsNV; -typedef VkFlags VkExternalMemoryHandleTypeFlagsNV; - -typedef enum VkExternalMemoryFeatureFlagBitsNV { - VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001, - VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002, - VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004, - VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF -} VkExternalMemoryFeatureFlagBitsNV; -typedef VkFlags VkExternalMemoryFeatureFlagsNV; - -typedef struct VkExternalImageFormatPropertiesNV { - VkImageFormatProperties imageFormatProperties; - VkExternalMemoryFeatureFlagsNV externalMemoryFeatures; - VkExternalMemoryHandleTypeFlagsNV exportFromImportedHandleTypes; - VkExternalMemoryHandleTypeFlagsNV compatibleHandleTypes; -} VkExternalImageFormatPropertiesNV; - -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV( - VkPhysicalDevice physicalDevice, - VkFormat format, - VkImageType type, - VkImageTiling tiling, - VkImageUsageFlags usage, - VkImageCreateFlags flags, - VkExternalMemoryHandleTypeFlagsNV externalHandleType, - VkExternalImageFormatPropertiesNV *pExternalImageFormatProperties); -#endif - -#define VK_NV_external_memory 1 -#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1 -#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory" - -typedef struct VkExternalMemoryImageCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExternalMemoryImageCreateInfoNV; - -typedef struct VkExportMemoryAllocateInfoNV { - VkStructureType sType; - const void *pNext; - VkExternalMemoryHandleTypeFlagsNV handleTypes; -} VkExportMemoryAllocateInfoNV; - -#define VK_KHX_device_group 1 -#define VK_KHX_DEVICE_GROUP_SPEC_VERSION 1 -#define VK_KHX_DEVICE_GROUP_EXTENSION_NAME "VK_KHX_device_group" -#define VK_MAX_DEVICE_GROUP_SIZE_KHX 32 - -typedef enum VkPeerMemoryFeatureFlagBitsKHX { - VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHX = 0x00000001, - 
VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHX = 0x00000002, - VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHX = 0x00000004, - VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHX = 0x00000008, - VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkPeerMemoryFeatureFlagBitsKHX; -typedef VkFlags VkPeerMemoryFeatureFlagsKHX; - -typedef enum VkMemoryAllocateFlagBitsKHX { - VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHX = 0x00000001, - VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkMemoryAllocateFlagBitsKHX; -typedef VkFlags VkMemoryAllocateFlagsKHX; - -typedef enum VkDeviceGroupPresentModeFlagBitsKHX { - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHX = 0x00000001, - VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHX = 0x00000002, - VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHX = 0x00000004, - VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHX = 0x00000008, - VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHX = 0x7FFFFFFF -} VkDeviceGroupPresentModeFlagBitsKHX; -typedef VkFlags VkDeviceGroupPresentModeFlagsKHX; - -typedef struct VkMemoryAllocateFlagsInfoKHX { - VkStructureType sType; - const void *pNext; - VkMemoryAllocateFlagsKHX flags; - uint32_t deviceMask; -} VkMemoryAllocateFlagsInfoKHX; - -typedef struct VkBindBufferMemoryInfoKHX { - VkStructureType sType; - const void *pNext; - VkBuffer buffer; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t *pDeviceIndices; -} VkBindBufferMemoryInfoKHX; - -typedef struct VkBindImageMemoryInfoKHX { - VkStructureType sType; - const void *pNext; - VkImage image; - VkDeviceMemory memory; - VkDeviceSize memoryOffset; - uint32_t deviceIndexCount; - const uint32_t *pDeviceIndices; - uint32_t SFRRectCount; - const VkRect2D *pSFRRects; -} VkBindImageMemoryInfoKHX; - -typedef struct VkDeviceGroupRenderPassBeginInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t deviceMask; - uint32_t deviceRenderAreaCount; - const VkRect2D *pDeviceRenderAreas; -} VkDeviceGroupRenderPassBeginInfoKHX; - -typedef struct VkDeviceGroupCommandBufferBeginInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t deviceMask; -} VkDeviceGroupCommandBufferBeginInfoKHX; - -typedef struct VkDeviceGroupSubmitInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t waitSemaphoreCount; - const uint32_t *pWaitSemaphoreDeviceIndices; - uint32_t commandBufferCount; - const uint32_t *pCommandBufferDeviceMasks; - uint32_t signalSemaphoreCount; - const uint32_t *pSignalSemaphoreDeviceIndices; -} VkDeviceGroupSubmitInfoKHX; - -typedef struct VkDeviceGroupBindSparseInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t resourceDeviceIndex; - uint32_t memoryDeviceIndex; -} VkDeviceGroupBindSparseInfoKHX; - -typedef struct VkDeviceGroupPresentCapabilitiesKHX { - VkStructureType sType; - const void *pNext; - uint32_t presentMask[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupPresentCapabilitiesKHX; - -typedef struct VkImageSwapchainCreateInfoKHX { - VkStructureType sType; - const void *pNext; - VkSwapchainKHR swapchain; -} VkImageSwapchainCreateInfoKHX; - -typedef struct VkBindImageMemorySwapchainInfoKHX { - VkStructureType sType; - const void *pNext; - VkSwapchainKHR swapchain; - uint32_t imageIndex; -} VkBindImageMemorySwapchainInfoKHX; - -typedef struct VkAcquireNextImageInfoKHX { - VkStructureType sType; - const void *pNext; - VkSwapchainKHR swapchain; - uint64_t timeout; - VkSemaphore semaphore; - VkFence fence; - uint32_t deviceMask; -} VkAcquireNextImageInfoKHX; - -typedef 
struct VkDeviceGroupPresentInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t swapchainCount; - const uint32_t *pDeviceMasks; - VkDeviceGroupPresentModeFlagBitsKHX mode; -} VkDeviceGroupPresentInfoKHX; - -typedef struct VkDeviceGroupSwapchainCreateInfoKHX { - VkStructureType sType; - const void *pNext; - VkDeviceGroupPresentModeFlagsKHX modes; -} VkDeviceGroupSwapchainCreateInfoKHX; - -typedef void(VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHX)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlagsKHX *pPeerMemoryFeatures); -typedef VkResult(VKAPI_PTR *PFN_vkBindBufferMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfoKHX *pBindInfos); -typedef VkResult(VKAPI_PTR *PFN_vkBindImageMemory2KHX)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfoKHX *pBindInfos); -typedef void(VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHX)(VkCommandBuffer commandBuffer, uint32_t deviceMask); -typedef void(VKAPI_PTR *PFN_vkCmdDispatchBaseKHX)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -typedef VkResult(VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHX)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHX *pDeviceGroupPresentCapabilities); -typedef VkResult(VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHX)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHX *pModes); -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHX)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t *pRectCount, VkRect2D *pRects); -typedef VkResult(VKAPI_PTR *PFN_vkAcquireNextImage2KHX)(VkDevice device, const VkAcquireNextImageInfoKHX *pAcquireInfo, uint32_t *pImageIndex); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHX( - VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlagsKHX *pPeerMemoryFeatures); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHX( - VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfoKHX *pBindInfos); - -VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHX( - VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfoKHX *pBindInfos); - -VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHX( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); - -VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHX( - VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHX( - VkDevice device, - VkDeviceGroupPresentCapabilitiesKHX *pDeviceGroupPresentCapabilities); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHX( - VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHX *pModes); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHX( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t *pRectCount, - VkRect2D *pRects); - -VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHX( - VkDevice device, - const VkAcquireNextImageInfoKHX *pAcquireInfo, - uint32_t *pImageIndex); -#endif - -#define VK_EXT_validation_flags 1 -#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME 
"VK_EXT_validation_flags" - -typedef enum VkValidationCheckEXT { - VK_VALIDATION_CHECK_ALL_EXT = 0, - VK_VALIDATION_CHECK_SHADERS_EXT = 1, - VK_VALIDATION_CHECK_BEGIN_RANGE_EXT = VK_VALIDATION_CHECK_ALL_EXT, - VK_VALIDATION_CHECK_END_RANGE_EXT = VK_VALIDATION_CHECK_SHADERS_EXT, - VK_VALIDATION_CHECK_RANGE_SIZE_EXT = (VK_VALIDATION_CHECK_SHADERS_EXT - VK_VALIDATION_CHECK_ALL_EXT + 1), // NOLINT: misc-redundant-expression - VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCheckEXT; - -typedef struct VkValidationFlagsEXT { - VkStructureType sType; - const void *pNext; - uint32_t disabledValidationCheckCount; - VkValidationCheckEXT *pDisabledValidationChecks; -} VkValidationFlagsEXT; - -#ifdef VK_USE_PLATFORM_VI_NN -#define VK_NN_vi_surface 1 -#define VK_NN_VI_SURFACE_SPEC_VERSION 1 -#define VK_NN_VI_SURFACE_EXTENSION_NAME "VK_NN_vi_surface" - -typedef VkFlags VkViSurfaceCreateFlagsNN; - -typedef struct VkViSurfaceCreateInfoNN { - VkStructureType sType; - const void *pNext; - VkViSurfaceCreateFlagsNN flags; - void *window; -} VkViSurfaceCreateInfoNN; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateViSurfaceNN)(VkInstance instance, const VkViSurfaceCreateInfoNN *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateViSurfaceNN( - VkInstance instance, - const VkViSurfaceCreateInfoNN *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSurfaceKHR *pSurface); -#endif -#endif /* VK_USE_PLATFORM_VI_NN */ - -#define VK_EXT_shader_subgroup_ballot 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot" - -#define VK_EXT_shader_subgroup_vote 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1 -#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote" - -#define VK_KHX_device_group_creation 1 -#define VK_KHX_DEVICE_GROUP_CREATION_SPEC_VERSION 1 -#define VK_KHX_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHX_device_group_creation" - -typedef struct VkPhysicalDeviceGroupPropertiesKHX { - VkStructureType sType; - void *pNext; - uint32_t physicalDeviceCount; - VkPhysicalDevice physicalDevices[VK_MAX_DEVICE_GROUP_SIZE_KHX]; - VkBool32 subsetAllocation; -} VkPhysicalDeviceGroupPropertiesKHX; - -typedef struct VkDeviceGroupDeviceCreateInfoKHX { - VkStructureType sType; - const void *pNext; - uint32_t physicalDeviceCount; - const VkPhysicalDevice *pPhysicalDevices; -} VkDeviceGroupDeviceCreateInfoKHX; - -typedef VkResult(VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHX)(VkInstance instance, uint32_t *pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupPropertiesKHX *pPhysicalDeviceGroupProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHX( - VkInstance instance, - uint32_t *pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupPropertiesKHX *pPhysicalDeviceGroupProperties); -#endif - -#define VK_NVX_device_generated_commands 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkObjectTableNVX) -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNVX) - -#define VK_NVX_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3 -#define VK_NVX_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NVX_device_generated_commands" - -typedef enum VkIndirectCommandsTokenTypeNVX { - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX = 0, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DESCRIPTOR_SET_NVX = 1, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NVX = 2, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NVX = 
3, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NVX = 4, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NVX = 5, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NVX = 6, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX = 7, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_BEGIN_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_END_RANGE_NVX = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX, - VK_INDIRECT_COMMANDS_TOKEN_TYPE_RANGE_SIZE_NVX = (VK_INDIRECT_COMMANDS_TOKEN_TYPE_DISPATCH_NVX - VK_INDIRECT_COMMANDS_TOKEN_TYPE_PIPELINE_NVX + 1), // NOLINT: misc-redundant-expression - VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsTokenTypeNVX; - -typedef enum VkObjectEntryTypeNVX { - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX = 0, - VK_OBJECT_ENTRY_TYPE_PIPELINE_NVX = 1, - VK_OBJECT_ENTRY_TYPE_INDEX_BUFFER_NVX = 2, - VK_OBJECT_ENTRY_TYPE_VERTEX_BUFFER_NVX = 3, - VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX = 4, - VK_OBJECT_ENTRY_TYPE_BEGIN_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX, - VK_OBJECT_ENTRY_TYPE_END_RANGE_NVX = VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX, - VK_OBJECT_ENTRY_TYPE_RANGE_SIZE_NVX = (VK_OBJECT_ENTRY_TYPE_PUSH_CONSTANT_NVX - VK_OBJECT_ENTRY_TYPE_DESCRIPTOR_SET_NVX + 1), // NOLINT: misc-redundant-expression - VK_OBJECT_ENTRY_TYPE_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryTypeNVX; - -typedef enum VkIndirectCommandsLayoutUsageFlagBitsNVX { - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NVX = 0x00000001, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_SPARSE_SEQUENCES_BIT_NVX = 0x00000002, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EMPTY_EXECUTIONS_BIT_NVX = 0x00000004, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NVX = 0x00000008, - VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkIndirectCommandsLayoutUsageFlagBitsNVX; -typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNVX; - -typedef enum VkObjectEntryUsageFlagBitsNVX { - VK_OBJECT_ENTRY_USAGE_GRAPHICS_BIT_NVX = 0x00000001, - VK_OBJECT_ENTRY_USAGE_COMPUTE_BIT_NVX = 0x00000002, - VK_OBJECT_ENTRY_USAGE_FLAG_BITS_MAX_ENUM_NVX = 0x7FFFFFFF -} VkObjectEntryUsageFlagBitsNVX; -typedef VkFlags VkObjectEntryUsageFlagsNVX; - -typedef struct VkDeviceGeneratedCommandsFeaturesNVX { - VkStructureType sType; - const void *pNext; - VkBool32 computeBindingPointSupport; -} VkDeviceGeneratedCommandsFeaturesNVX; - -typedef struct VkDeviceGeneratedCommandsLimitsNVX { - VkStructureType sType; - const void *pNext; - uint32_t maxIndirectCommandsLayoutTokenCount; - uint32_t maxObjectEntryCounts; - uint32_t minSequenceCountBufferOffsetAlignment; - uint32_t minSequenceIndexBufferOffsetAlignment; - uint32_t minCommandsTokenBufferOffsetAlignment; -} VkDeviceGeneratedCommandsLimitsNVX; - -typedef struct VkIndirectCommandsTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - VkBuffer buffer; - VkDeviceSize offset; -} VkIndirectCommandsTokenNVX; - -typedef struct VkIndirectCommandsLayoutTokenNVX { - VkIndirectCommandsTokenTypeNVX tokenType; - uint32_t bindingUnit; - uint32_t dynamicCount; - uint32_t divisor; -} VkIndirectCommandsLayoutTokenNVX; - -typedef struct VkIndirectCommandsLayoutCreateInfoNVX { - VkStructureType sType; - const void *pNext; - VkPipelineBindPoint pipelineBindPoint; - VkIndirectCommandsLayoutUsageFlagsNVX flags; - uint32_t tokenCount; - const VkIndirectCommandsLayoutTokenNVX *pTokens; -} VkIndirectCommandsLayoutCreateInfoNVX; - -typedef struct VkCmdProcessCommandsInfoNVX { - VkStructureType sType; - const void *pNext; - VkObjectTableNVX objectTable; - 
VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t indirectCommandsTokenCount; - const VkIndirectCommandsTokenNVX *pIndirectCommandsTokens; - uint32_t maxSequencesCount; - VkCommandBuffer targetCommandBuffer; - VkBuffer sequencesCountBuffer; - VkDeviceSize sequencesCountOffset; - VkBuffer sequencesIndexBuffer; - VkDeviceSize sequencesIndexOffset; -} VkCmdProcessCommandsInfoNVX; - -typedef struct VkCmdReserveSpaceForCommandsInfoNVX { - VkStructureType sType; - const void *pNext; - VkObjectTableNVX objectTable; - VkIndirectCommandsLayoutNVX indirectCommandsLayout; - uint32_t maxSequencesCount; -} VkCmdReserveSpaceForCommandsInfoNVX; - -typedef struct VkObjectTableCreateInfoNVX { - VkStructureType sType; - const void *pNext; - uint32_t objectCount; - const VkObjectEntryTypeNVX *pObjectEntryTypes; - const uint32_t *pObjectEntryCounts; - const VkObjectEntryUsageFlagsNVX *pObjectEntryUsageFlags; - uint32_t maxUniformBuffersPerDescriptor; - uint32_t maxStorageBuffersPerDescriptor; - uint32_t maxStorageImagesPerDescriptor; - uint32_t maxSampledImagesPerDescriptor; - uint32_t maxPipelineLayouts; -} VkObjectTableCreateInfoNVX; - -typedef struct VkObjectTableEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; -} VkObjectTableEntryNVX; - -typedef struct VkObjectTablePipelineEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipeline pipeline; -} VkObjectTablePipelineEntryNVX; - -typedef struct VkObjectTableDescriptorSetEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkDescriptorSet descriptorSet; -} VkObjectTableDescriptorSetEntryNVX; - -typedef struct VkObjectTableVertexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; -} VkObjectTableVertexBufferEntryNVX; - -typedef struct VkObjectTableIndexBufferEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkBuffer buffer; - VkIndexType indexType; -} VkObjectTableIndexBufferEntryNVX; - -typedef struct VkObjectTablePushConstantEntryNVX { - VkObjectEntryTypeNVX type; - VkObjectEntryUsageFlagsNVX flags; - VkPipelineLayout pipelineLayout; - VkShaderStageFlags stageFlags; -} VkObjectTablePushConstantEntryNVX; - -typedef void(VKAPI_PTR *PFN_vkCmdProcessCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdProcessCommandsInfoNVX *pProcessCommandsInfo); -typedef void(VKAPI_PTR *PFN_vkCmdReserveSpaceForCommandsNVX)(VkCommandBuffer commandBuffer, const VkCmdReserveSpaceForCommandsInfoNVX *pReserveSpaceInfo); -typedef VkResult(VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNVX)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNVX *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkIndirectCommandsLayoutNVX *pIndirectCommandsLayout); -typedef void(VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNVX)(VkDevice device, VkIndirectCommandsLayoutNVX indirectCommandsLayout, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkCreateObjectTableNVX)(VkDevice device, const VkObjectTableCreateInfoNVX *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkObjectTableNVX *pObjectTable); -typedef void(VKAPI_PTR *PFN_vkDestroyObjectTableNVX)(VkDevice device, VkObjectTableNVX objectTable, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkRegisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectTableEntryNVX *const *ppObjectTableEntries, const uint32_t *pObjectIndices); 
-typedef VkResult(VKAPI_PTR *PFN_vkUnregisterObjectsNVX)(VkDevice device, VkObjectTableNVX objectTable, uint32_t objectCount, const VkObjectEntryTypeNVX *pObjectEntryTypes, const uint32_t *pObjectIndices); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX)(VkPhysicalDevice physicalDevice, VkDeviceGeneratedCommandsFeaturesNVX *pFeatures, VkDeviceGeneratedCommandsLimitsNVX *pLimits); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdProcessCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdProcessCommandsInfoNVX *pProcessCommandsInfo); - -VKAPI_ATTR void VKAPI_CALL vkCmdReserveSpaceForCommandsNVX( - VkCommandBuffer commandBuffer, - const VkCmdReserveSpaceForCommandsInfoNVX *pReserveSpaceInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNVX( - VkDevice device, - const VkIndirectCommandsLayoutCreateInfoNVX *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkIndirectCommandsLayoutNVX *pIndirectCommandsLayout); - -VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNVX( - VkDevice device, - VkIndirectCommandsLayoutNVX indirectCommandsLayout, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkCreateObjectTableNVX( - VkDevice device, - const VkObjectTableCreateInfoNVX *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkObjectTableNVX *pObjectTable); - -VKAPI_ATTR void VKAPI_CALL vkDestroyObjectTableNVX( - VkDevice device, - VkObjectTableNVX objectTable, - const VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectTableEntryNVX *const *ppObjectTableEntries, - const uint32_t *pObjectIndices); - -VKAPI_ATTR VkResult VKAPI_CALL vkUnregisterObjectsNVX( - VkDevice device, - VkObjectTableNVX objectTable, - uint32_t objectCount, - const VkObjectEntryTypeNVX *pObjectEntryTypes, - const uint32_t *pObjectIndices); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceGeneratedCommandsPropertiesNVX( - VkPhysicalDevice physicalDevice, - VkDeviceGeneratedCommandsFeaturesNVX *pFeatures, - VkDeviceGeneratedCommandsLimitsNVX *pLimits); -#endif - -#define VK_NV_clip_space_w_scaling 1 -#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1 -#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling" - -typedef struct VkViewportWScalingNV { - float xcoeff; - float ycoeff; -} VkViewportWScalingNV; - -typedef struct VkPipelineViewportWScalingStateCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkBool32 viewportWScalingEnable; - uint32_t viewportCount; - const VkViewportWScalingNV *pViewportWScalings; -} VkPipelineViewportWScalingStateCreateInfoNV; - -typedef void(VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV *pViewportWScalings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewportWScalingNV *pViewportWScalings); -#endif - -#define VK_EXT_direct_mode_display 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1 -#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display" - -typedef VkResult(VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT( - VkPhysicalDevice physicalDevice, - 
VkDisplayKHR display); -#endif - -#define VK_EXT_display_surface_counter 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter" -#define VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT - -typedef enum VkSurfaceCounterFlagBitsEXT { - VK_SURFACE_COUNTER_VBLANK_EXT = 0x00000001, - VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSurfaceCounterFlagBitsEXT; -typedef VkFlags VkSurfaceCounterFlagsEXT; - -typedef struct VkSurfaceCapabilities2EXT { - VkStructureType sType; - void *pNext; - uint32_t minImageCount; - uint32_t maxImageCount; - VkExtent2D currentExtent; - VkExtent2D minImageExtent; - VkExtent2D maxImageExtent; - uint32_t maxImageArrayLayers; - VkSurfaceTransformFlagsKHR supportedTransforms; - VkSurfaceTransformFlagBitsKHR currentTransform; - VkCompositeAlphaFlagsKHR supportedCompositeAlpha; - VkImageUsageFlags supportedUsageFlags; - VkSurfaceCounterFlagsEXT supportedSurfaceCounters; -} VkSurfaceCapabilities2EXT; - -typedef VkResult(VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT *pSurfaceCapabilities); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT( - VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT *pSurfaceCapabilities); -#endif - -#define VK_EXT_display_control 1 -#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1 -#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control" - -typedef enum VkDisplayPowerStateEXT { - VK_DISPLAY_POWER_STATE_OFF_EXT = 0, - VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1, - VK_DISPLAY_POWER_STATE_ON_EXT = 2, - VK_DISPLAY_POWER_STATE_BEGIN_RANGE_EXT = VK_DISPLAY_POWER_STATE_OFF_EXT, - VK_DISPLAY_POWER_STATE_END_RANGE_EXT = VK_DISPLAY_POWER_STATE_ON_EXT, - VK_DISPLAY_POWER_STATE_RANGE_SIZE_EXT = (VK_DISPLAY_POWER_STATE_ON_EXT - VK_DISPLAY_POWER_STATE_OFF_EXT + 1), // NOLINT: misc-redundant-expression - VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayPowerStateEXT; - -typedef enum VkDeviceEventTypeEXT { - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0, - VK_DEVICE_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_END_RANGE_EXT = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT, - VK_DEVICE_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT - VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT + 1), // NOLINT: misc-redundant-expression - VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDeviceEventTypeEXT; - -typedef enum VkDisplayEventTypeEXT { - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0, - VK_DISPLAY_EVENT_TYPE_BEGIN_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_END_RANGE_EXT = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT, - VK_DISPLAY_EVENT_TYPE_RANGE_SIZE_EXT = (VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT - VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT + 1), // NOLINT: misc-redundant-expression - VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDisplayEventTypeEXT; - -typedef struct VkDisplayPowerInfoEXT { - VkStructureType sType; - const void *pNext; - VkDisplayPowerStateEXT powerState; -} VkDisplayPowerInfoEXT; - -typedef struct VkDeviceEventInfoEXT { - VkStructureType sType; - const void *pNext; - VkDeviceEventTypeEXT deviceEvent; -} VkDeviceEventInfoEXT; - -typedef struct VkDisplayEventInfoEXT { - VkStructureType sType; - const void *pNext; - 
VkDisplayEventTypeEXT displayEvent; -} VkDisplayEventInfoEXT; - -typedef struct VkSwapchainCounterCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkSurfaceCounterFlagsEXT surfaceCounters; -} VkSwapchainCounterCreateInfoEXT; - -typedef VkResult(VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT *pDisplayPowerInfo); -typedef VkResult(VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT *pDeviceEventInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence); -typedef VkResult(VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT *pDisplayEventInfo, const VkAllocationCallbacks *pAllocator, VkFence *pFence); -typedef VkResult(VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t *pCounterValue); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayPowerInfoEXT *pDisplayPowerInfo); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT( - VkDevice device, - const VkDeviceEventInfoEXT *pDeviceEventInfo, - const VkAllocationCallbacks *pAllocator, - VkFence *pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT( - VkDevice device, - VkDisplayKHR display, - const VkDisplayEventInfoEXT *pDisplayEventInfo, - const VkAllocationCallbacks *pAllocator, - VkFence *pFence); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT( - VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t *pCounterValue); -#endif - -#define VK_GOOGLE_display_timing 1 -#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1 -#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing" - -typedef struct VkRefreshCycleDurationGOOGLE { - uint64_t refreshDuration; -} VkRefreshCycleDurationGOOGLE; - -typedef struct VkPastPresentationTimingGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; - uint64_t actualPresentTime; - uint64_t earliestPresentTime; - uint64_t presentMargin; -} VkPastPresentationTimingGOOGLE; - -typedef struct VkPresentTimeGOOGLE { - uint32_t presentID; - uint64_t desiredPresentTime; -} VkPresentTimeGOOGLE; - -typedef struct VkPresentTimesInfoGOOGLE { - VkStructureType sType; - const void *pNext; - uint32_t swapchainCount; - const VkPresentTimeGOOGLE *pTimes; -} VkPresentTimesInfoGOOGLE; - -typedef VkResult(VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE *pDisplayTimingProperties); -typedef VkResult(VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t *pPresentationTimingCount, VkPastPresentationTimingGOOGLE *pPresentationTimings); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - VkRefreshCycleDurationGOOGLE *pDisplayTimingProperties); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE( - VkDevice device, - VkSwapchainKHR swapchain, - uint32_t *pPresentationTimingCount, - VkPastPresentationTimingGOOGLE *pPresentationTimings); -#endif - -#define VK_NV_sample_mask_override_coverage 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1 -#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage" - -#define VK_NV_geometry_shader_passthrough 1 
-#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1 -#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough" - -#define VK_NV_viewport_array2 1 -#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME "VK_NV_viewport_array2" - -#define VK_NVX_multiview_per_view_attributes 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1 -#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes" - -typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX { - VkStructureType sType; - void *pNext; - VkBool32 perViewPositionAllComponents; -} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX; - -#define VK_NV_viewport_swizzle 1 -#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1 -#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle" - -typedef enum VkViewportCoordinateSwizzleNV { - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5, - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6, - VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7, - VK_VIEWPORT_COORDINATE_SWIZZLE_BEGIN_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_END_RANGE_NV = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV, - VK_VIEWPORT_COORDINATE_SWIZZLE_RANGE_SIZE_NV = (VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV - VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV + 1), // NOLINT: misc-redundant-expression - VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF -} VkViewportCoordinateSwizzleNV; - -typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV; - -typedef struct VkViewportSwizzleNV { - VkViewportCoordinateSwizzleNV x; - VkViewportCoordinateSwizzleNV y; - VkViewportCoordinateSwizzleNV z; - VkViewportCoordinateSwizzleNV w; -} VkViewportSwizzleNV; - -typedef struct VkPipelineViewportSwizzleStateCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkPipelineViewportSwizzleStateCreateFlagsNV flags; - uint32_t viewportCount; - const VkViewportSwizzleNV *pViewportSwizzles; -} VkPipelineViewportSwizzleStateCreateInfoNV; - -#define VK_EXT_discard_rectangles 1 -#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1 -#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles" - -typedef enum VkDiscardRectangleModeEXT { - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0, - VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1, - VK_DISCARD_RECTANGLE_MODE_BEGIN_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_END_RANGE_EXT = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT, - VK_DISCARD_RECTANGLE_MODE_RANGE_SIZE_EXT = (VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT - VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT + 1), // NOLINT: misc-redundant-expression - VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkDiscardRectangleModeEXT; - -typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT; - -typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT { - VkStructureType sType; - void *pNext; - uint32_t maxDiscardRectangles; -} VkPhysicalDeviceDiscardRectanglePropertiesEXT; - -typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkPipelineDiscardRectangleStateCreateFlagsEXT flags; - VkDiscardRectangleModeEXT 
discardRectangleMode; - uint32_t discardRectangleCount; - const VkRect2D *pDiscardRectangles; -} VkPipelineDiscardRectangleStateCreateInfoEXT; - -typedef void(VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D *pDiscardRectangles); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT( - VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D *pDiscardRectangles); -#endif - -#define VK_EXT_swapchain_colorspace 1 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 3 -#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace" - -#define VK_EXT_hdr_metadata 1 -#define VK_EXT_HDR_METADATA_SPEC_VERSION 1 -#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata" - -typedef struct VkXYColorEXT { - float x; - float y; -} VkXYColorEXT; - -typedef struct VkHdrMetadataEXT { - VkStructureType sType; - const void *pNext; - VkXYColorEXT displayPrimaryRed; - VkXYColorEXT displayPrimaryGreen; - VkXYColorEXT displayPrimaryBlue; - VkXYColorEXT whitePoint; - float maxLuminance; - float minLuminance; - float maxContentLightLevel; - float maxFrameAverageLightLevel; -} VkHdrMetadataEXT; - -typedef void(VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR *pSwapchains, const VkHdrMetadataEXT *pMetadata); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT( - VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR *pSwapchains, - const VkHdrMetadataEXT *pMetadata); -#endif - -#ifdef VK_USE_PLATFORM_IOS_MVK -#define VK_MVK_ios_surface 1 -#define VK_MVK_IOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface" - -typedef VkFlags VkIOSSurfaceCreateFlagsMVK; - -typedef struct VkIOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void *pNext; - VkIOSSurfaceCreateFlagsMVK flags; - const void *pView; -} VkIOSSurfaceCreateInfoMVK; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateIOSSurfaceMVK)(VkInstance instance, const VkIOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateIOSSurfaceMVK( - VkInstance instance, - const VkIOSSurfaceCreateInfoMVK *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSurfaceKHR *pSurface); -#endif -#endif /* VK_USE_PLATFORM_IOS_MVK */ - -#ifdef VK_USE_PLATFORM_MACOS_MVK -#define VK_MVK_macos_surface 1 -#define VK_MVK_MACOS_SURFACE_SPEC_VERSION 2 -#define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface" - -typedef VkFlags VkMacOSSurfaceCreateFlagsMVK; - -typedef struct VkMacOSSurfaceCreateInfoMVK { - VkStructureType sType; - const void *pNext; - VkMacOSSurfaceCreateFlagsMVK flags; - const void *pView; -} VkMacOSSurfaceCreateInfoMVK; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateMacOSSurfaceMVK)(VkInstance instance, const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateMacOSSurfaceMVK( - VkInstance instance, - const VkMacOSSurfaceCreateInfoMVK *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkSurfaceKHR *pSurface); -#endif -#endif /* VK_USE_PLATFORM_MACOS_MVK */ - -#define VK_EXT_sampler_filter_minmax 1 -#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 1 -#define 
VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax" - -typedef enum VkSamplerReductionModeEXT { - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = 0, - VK_SAMPLER_REDUCTION_MODE_MIN_EXT = 1, - VK_SAMPLER_REDUCTION_MODE_MAX_EXT = 2, - VK_SAMPLER_REDUCTION_MODE_BEGIN_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT, - VK_SAMPLER_REDUCTION_MODE_END_RANGE_EXT = VK_SAMPLER_REDUCTION_MODE_MAX_EXT, - VK_SAMPLER_REDUCTION_MODE_RANGE_SIZE_EXT = (VK_SAMPLER_REDUCTION_MODE_MAX_EXT - VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT + 1), // NOLINT: misc-redundant-expression - VK_SAMPLER_REDUCTION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF -} VkSamplerReductionModeEXT; - -typedef struct VkSamplerReductionModeCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkSamplerReductionModeEXT reductionMode; -} VkSamplerReductionModeCreateInfoEXT; - -typedef struct VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT { - VkStructureType sType; - void *pNext; - VkBool32 filterMinmaxSingleComponentFormats; - VkBool32 filterMinmaxImageComponentMapping; -} VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT; - -#define VK_AMD_gpu_shader_int16 1 -#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 1 -#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16" - -#define VK_AMD_mixed_attachment_samples 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1 -#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples" - -#define VK_AMD_shader_fragment_mask 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1 -#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask" - -#define VK_EXT_shader_stencil_export 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1 -#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export" - -#define VK_EXT_sample_locations 1 -#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1 -#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations" - -typedef struct VkSampleLocationEXT { - float x; - float y; -} VkSampleLocationEXT; - -typedef struct VkSampleLocationsInfoEXT { - VkStructureType sType; - const void *pNext; - VkSampleCountFlagBits sampleLocationsPerPixel; - VkExtent2D sampleLocationGridSize; - uint32_t sampleLocationsCount; - const VkSampleLocationEXT *pSampleLocations; -} VkSampleLocationsInfoEXT; - -typedef struct VkAttachmentSampleLocationsEXT { - uint32_t attachmentIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkAttachmentSampleLocationsEXT; - -typedef struct VkSubpassSampleLocationsEXT { - uint32_t subpassIndex; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkSubpassSampleLocationsEXT; - -typedef struct VkRenderPassSampleLocationsBeginInfoEXT { - VkStructureType sType; - const void *pNext; - uint32_t attachmentInitialSampleLocationsCount; - const VkAttachmentSampleLocationsEXT *pAttachmentInitialSampleLocations; - uint32_t postSubpassSampleLocationsCount; - const VkSubpassSampleLocationsEXT *pSubpassSampleLocations; -} VkRenderPassSampleLocationsBeginInfoEXT; - -typedef struct VkPipelineSampleLocationsStateCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkBool32 sampleLocationsEnable; - VkSampleLocationsInfoEXT sampleLocationsInfo; -} VkPipelineSampleLocationsStateCreateInfoEXT; - -typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT { - VkStructureType sType; - void *pNext; - VkSampleCountFlags sampleLocationSampleCounts; - VkExtent2D maxSampleLocationGridSize; - float sampleLocationCoordinateRange[2]; - uint32_t 
sampleLocationSubPixelBits; - VkBool32 variableSampleLocations; -} VkPhysicalDeviceSampleLocationsPropertiesEXT; - -typedef struct VkMultisamplePropertiesEXT { - VkStructureType sType; - void *pNext; - VkExtent2D maxSampleLocationGridSize; -} VkMultisamplePropertiesEXT; - -typedef void(VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT *pSampleLocationsInfo); -typedef void(VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT *pMultisampleProperties); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT( - VkCommandBuffer commandBuffer, - const VkSampleLocationsInfoEXT *pSampleLocationsInfo); - -VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT( - VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT *pMultisampleProperties); -#endif - -#define VK_EXT_blend_operation_advanced 1 -#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2 -#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced" - -typedef enum VkBlendOverlapEXT { - VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0, - VK_BLEND_OVERLAP_DISJOINT_EXT = 1, - VK_BLEND_OVERLAP_CONJOINT_EXT = 2, - VK_BLEND_OVERLAP_BEGIN_RANGE_EXT = VK_BLEND_OVERLAP_UNCORRELATED_EXT, - VK_BLEND_OVERLAP_END_RANGE_EXT = VK_BLEND_OVERLAP_CONJOINT_EXT, - VK_BLEND_OVERLAP_RANGE_SIZE_EXT = (VK_BLEND_OVERLAP_CONJOINT_EXT - VK_BLEND_OVERLAP_UNCORRELATED_EXT + 1), // NOLINT: misc-redundant-expression - VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF -} VkBlendOverlapEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT { - VkStructureType sType; - void *pNext; - VkBool32 advancedBlendCoherentOperations; -} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT; - -typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT { - VkStructureType sType; - void *pNext; - uint32_t advancedBlendMaxColorAttachments; - VkBool32 advancedBlendIndependentBlend; - VkBool32 advancedBlendNonPremultipliedSrcColor; - VkBool32 advancedBlendNonPremultipliedDstColor; - VkBool32 advancedBlendCorrelatedOverlap; - VkBool32 advancedBlendAllOperations; -} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT; - -typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkBool32 srcPremultiplied; - VkBool32 dstPremultiplied; - VkBlendOverlapEXT blendOverlap; -} VkPipelineColorBlendAdvancedStateCreateInfoEXT; - -#define VK_NV_fragment_coverage_to_color 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1 -#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color" - -typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV; - -typedef struct VkPipelineCoverageToColorStateCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkPipelineCoverageToColorStateCreateFlagsNV flags; - VkBool32 coverageToColorEnable; - uint32_t coverageToColorLocation; -} VkPipelineCoverageToColorStateCreateInfoNV; - -#define VK_NV_framebuffer_mixed_samples 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1 -#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples" - -typedef enum VkCoverageModulationModeNV { - VK_COVERAGE_MODULATION_MODE_NONE_NV = 0, - VK_COVERAGE_MODULATION_MODE_RGB_NV = 1, - VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2, - VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3, - 
VK_COVERAGE_MODULATION_MODE_BEGIN_RANGE_NV = VK_COVERAGE_MODULATION_MODE_NONE_NV, - VK_COVERAGE_MODULATION_MODE_END_RANGE_NV = VK_COVERAGE_MODULATION_MODE_RGBA_NV, - VK_COVERAGE_MODULATION_MODE_RANGE_SIZE_NV = (VK_COVERAGE_MODULATION_MODE_RGBA_NV - VK_COVERAGE_MODULATION_MODE_NONE_NV + 1), // NOLINT: misc-redundant-expression - VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF -} VkCoverageModulationModeNV; - -typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV; - -typedef struct VkPipelineCoverageModulationStateCreateInfoNV { - VkStructureType sType; - const void *pNext; - VkPipelineCoverageModulationStateCreateFlagsNV flags; - VkCoverageModulationModeNV coverageModulationMode; - VkBool32 coverageModulationTableEnable; - uint32_t coverageModulationTableCount; - const float *pCoverageModulationTable; -} VkPipelineCoverageModulationStateCreateInfoNV; - -#define VK_NV_fill_rectangle 1 -#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1 -#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle" - -#define VK_EXT_post_depth_coverage 1 -#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1 -#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage" - -#define VK_EXT_validation_cache 1 -VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT) - -#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1 -#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache" - -typedef enum VkValidationCacheHeaderVersionEXT { - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1, - VK_VALIDATION_CACHE_HEADER_VERSION_BEGIN_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_END_RANGE_EXT = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT, - VK_VALIDATION_CACHE_HEADER_VERSION_RANGE_SIZE_EXT = (VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT - VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT + 1), // NOLINT: misc-redundant-expression - VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF -} VkValidationCacheHeaderVersionEXT; - -typedef VkFlags VkValidationCacheCreateFlagsEXT; - -typedef struct VkValidationCacheCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkValidationCacheCreateFlagsEXT flags; - size_t initialDataSize; - const void *pInitialData; -} VkValidationCacheCreateInfoEXT; - -typedef struct VkShaderModuleValidationCacheCreateInfoEXT { - VkStructureType sType; - const void *pNext; - VkValidationCacheEXT validationCache; -} VkShaderModuleValidationCacheCreateInfoEXT; - -typedef VkResult(VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkValidationCacheEXT *pValidationCache); -typedef void(VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks *pAllocator); -typedef VkResult(VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT *pSrcCaches); -typedef VkResult(VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t *pDataSize, void *pData); - -#ifndef VK_NO_PROTOTYPES -VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT( - VkDevice device, - const VkValidationCacheCreateInfoEXT *pCreateInfo, - const VkAllocationCallbacks *pAllocator, - VkValidationCacheEXT *pValidationCache); - -VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - const 
VkAllocationCallbacks *pAllocator); - -VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT( - VkDevice device, - VkValidationCacheEXT dstCache, - uint32_t srcCacheCount, - const VkValidationCacheEXT *pSrcCaches); - -VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT( - VkDevice device, - VkValidationCacheEXT validationCache, - size_t *pDataSize, - void *pData); -#endif - -#define VK_EXT_shader_viewport_index_layer 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1 -#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer" - -// Provided by VK_VERSION_1_1 -typedef struct VkPhysicalDeviceShaderFloat16Int8Features { - uint32_t sType; - void *pNext; - VkBool32 shaderFloat16; - VkBool32 shaderInt8; -} VkPhysicalDeviceShaderFloat16Int8FeaturesKHR; -#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR 1000082000 - -// Provided by VK_VERSION_1_2 -typedef struct VkPhysicalDevice8BitStorageFeatures { - uint32_t sType; - void *pNext; - VkBool32 storageBuffer8BitAccess; - VkBool32 uniformAndStorageBuffer8BitAccess; - VkBool32 storagePushConstant8; -} VkPhysicalDevice8BitStorageFeaturesKHR; -#define VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR 1000177000 - -// Provided by VK_KHR_portability_enumeration -#define VK_KHR_portability_enumeration 1 -#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1 -#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration" - -} // extern "C" - -#endif // HALIDE_MINI_VULKAN_H diff --git a/src/runtime/vulkan.cpp b/src/runtime/vulkan.cpp index cb79950f7421..776088448891 100644 --- a/src/runtime/vulkan.cpp +++ b/src/runtime/vulkan.cpp @@ -34,9 +34,9 @@ WEAK int halide_vulkan_acquire_context(void *user_context, VkInstance *instance, VkDevice *device, VkPhysicalDevice *physical_device, - VkCommandPool *command_pool, VkQueue *queue, uint32_t *queue_family_index, + VkDebugUtilsMessengerEXT *messenger, bool create) { #ifdef DEBUG_RUNTIME halide_start_clock(user_context); @@ -45,7 +45,7 @@ WEAK int halide_vulkan_acquire_context(void *user_context, halide_debug_assert(user_context, device != nullptr); halide_debug_assert(user_context, queue != nullptr); halide_debug_assert(user_context, &thread_lock != nullptr); - while (__atomic_test_and_set(&thread_lock, __ATOMIC_ACQUIRE)) {} + halide_mutex_lock(&thread_lock); // If the context has not been initialized, initialize it now. 
if ((cached_instance == nullptr) && create) { @@ -54,12 +54,12 @@ WEAK int halide_vulkan_acquire_context(void *user_context, &cached_instance, &cached_device, &cached_physical_device, - &cached_command_pool, &cached_queue, - &cached_queue_family_index); + &cached_queue_family_index, + &cached_messenger); if (error_code != halide_error_code_success) { debug(user_context) << "halide_vulkan_acquire_context: FAILED to create context!\n"; - __atomic_clear(&thread_lock, __ATOMIC_RELEASE); + halide_mutex_unlock(&thread_lock); return error_code; } } @@ -68,17 +68,24 @@ WEAK int halide_vulkan_acquire_context(void *user_context, *instance = cached_instance; *device = cached_device; *physical_device = cached_physical_device; - *command_pool = cached_command_pool; *queue = cached_queue; *queue_family_index = cached_queue_family_index; + *messenger = cached_messenger; return halide_error_code_success; } -WEAK int halide_vulkan_release_context(void *user_context, VkInstance instance, VkDevice device, VkQueue queue) { - __atomic_clear(&thread_lock, __ATOMIC_RELEASE); +WEAK int halide_vulkan_release_context(void *user_context, VkInstance instance, VkDevice device, VkQueue queue, VkDebugUtilsMessengerEXT messenger) { + halide_mutex_unlock(&thread_lock); return halide_error_code_success; } +WEAK bool halide_vulkan_is_initialized() { + halide_mutex_lock(&thread_lock); + bool is_initialized = (cached_instance != nullptr) && (cached_device != nullptr); + halide_mutex_unlock(&thread_lock); + return is_initialized; +} + WEAK int halide_vulkan_device_free(void *user_context, halide_buffer_t *halide_buffer) { debug(user_context) << "halide_vulkan_device_free (user_context: " << user_context @@ -144,11 +151,11 @@ WEAK int halide_vulkan_initialize_kernels(void *user_context, void **state_ptr, #ifdef DEBUG_RUNTIME uint64_t t_before = halide_current_time_ns(user_context); #endif - debug(user_context) << "halide_vulkan_initialize_kernels got compilation_cache mutex.\n"; + VulkanCompilationCacheEntry *cache_entry = nullptr; if (!compilation_cache.kernel_state_setup(user_context, state_ptr, ctx.device, cache_entry, - Halide::Runtime::Internal::Vulkan::vk_compile_shader_module, + Halide::Runtime::Internal::Vulkan::vk_compile_kernel_module, user_context, ctx.allocator, src, size)) { error(user_context) << "Vulkan: Failed to setup compilation cache!\n"; return halide_error_code_generic_error; @@ -196,7 +203,11 @@ WEAK int halide_vulkan_device_sync(void *user_context, halide_buffer_t *) { uint64_t t_before = halide_current_time_ns(user_context); #endif - vkQueueWaitIdle(ctx.queue); + VkResult result = vkQueueWaitIdle(ctx.queue); + if (result != VK_SUCCESS) { + error(user_context) << "Vulkan: vkQueueWaitIdle returned " << vk_get_error_name(result) << "\n"; + return halide_error_code_generic_error; + } #ifdef DEBUG_RUNTIME uint64_t t_after = halide_current_time_ns(user_context); @@ -211,32 +222,32 @@ WEAK int halide_vulkan_device_release(void *user_context) { << "halide_vulkan_device_release (user_context: " << user_context << ")\n"; VulkanMemoryAllocator *allocator = nullptr; - VkInstance instance = nullptr; - VkDevice device = nullptr; - VkCommandPool command_pool = VkInvalidCommandPool; - VkPhysicalDevice physical_device = nullptr; - VkQueue queue = nullptr; + VkInstance instance = VK_NULL_HANDLE; + VkDevice device = VK_NULL_HANDLE; + VkPhysicalDevice physical_device = VK_NULL_HANDLE; + VkQueue queue = VK_NULL_HANDLE; uint32_t queue_family_index = 0; + VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE; int destroy_status 
= halide_error_code_success; int acquire_status = halide_vulkan_acquire_context(user_context, reinterpret_cast(&allocator), - &instance, &device, &physical_device, &command_pool, &queue, &queue_family_index, false); + &instance, &device, &physical_device, &queue, &queue_family_index, &messenger, false); if (acquire_status == halide_error_code_success) { // Destroy the context if we created it if ((instance == cached_instance) && (device == cached_device)) { - destroy_status = vk_destroy_context(user_context, allocator, instance, device, physical_device, command_pool, queue); - cached_command_pool = VkInvalidCommandPool; + destroy_status = vk_destroy_context(user_context, allocator, instance, device, physical_device, queue, messenger); cached_allocator = nullptr; - cached_device = nullptr; - cached_physical_device = nullptr; - cached_queue = nullptr; + cached_device = VK_NULL_HANDLE; + cached_physical_device = VK_NULL_HANDLE; + cached_queue = VK_NULL_HANDLE; cached_queue_family_index = 0; - cached_instance = nullptr; + cached_instance = VK_NULL_HANDLE; + cached_messenger = VK_NULL_HANDLE; } - halide_vulkan_release_context(user_context, instance, device, queue); + halide_vulkan_release_context(user_context, instance, device, queue, messenger); } return destroy_status; @@ -277,12 +288,14 @@ WEAK int halide_vulkan_device_malloc(void *user_context, halide_buffer_t *buf) { #ifdef DEBUG_RUNTIME debug(user_context) << " allocating buffer: "; if (buf && buf->dim) { - debug(user_context) << "extents: " << buf->dim[0].extent << "x" - << buf->dim[1].extent << "x" << buf->dim[2].extent << "x" - << buf->dim[3].extent << " " - << "strides: " << buf->dim[0].stride << "x" - << buf->dim[1].stride << "x" << buf->dim[2].stride << "x" - << buf->dim[3].stride << " "; + debug(user_context) << "extents: "; + for (int i = 0; i < buf->dimensions; i++) { + debug(user_context) << buf->dim[i].extent << " "; + } + debug(user_context) << "strides: "; + for (int i = 0; i < buf->dimensions; i++) { + debug(user_context) << buf->dim[i].stride << " "; + } } debug(user_context) << "type: " << buf->type << " " << "size_in_bytes: " << (uint64_t)size << " " @@ -323,10 +336,15 @@ WEAK int halide_vulkan_device_malloc(void *user_context, halide_buffer_t *buf) { return halide_error_code_internal_error; } - int error_code = vk_clear_device_buffer(user_context, ctx.allocator, ctx.command_pool, ctx.queue, *device_buffer); + ScopedVulkanCommandBufferAndPool cmds(user_context, ctx.allocator, ctx.queue_family_index); + if (cmds.error_code != halide_error_code_success) { + error(user_context) << "Vulkan: Failed to create command buffer and pool for context!\n"; + return cmds.error_code; + } + + int error_code = vk_clear_device_buffer(user_context, ctx.allocator, cmds.command_buffer, ctx.queue, *device_buffer); if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to destroy command buffer!\n"; - return error_code; + error(user_context) << "Vulkan: Failed to clear device buffer!\n"; } #ifdef DEBUG_RUNTIME @@ -334,7 +352,7 @@ WEAK int halide_vulkan_device_malloc(void *user_context, halide_buffer_t *buf) { debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n"; #endif - return halide_error_code_success; + return error_code; } WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halide_buffer) { @@ -433,12 +451,10 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid << " from halide buffer=" << halide_buffer << "\n"; #endif - // create a 
command buffer - VkCommandBuffer command_buffer; - error_code = vk_create_command_buffer(user_context, ctx.allocator, ctx.command_pool, &command_buffer); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to create command buffer!\n"; - return error_code; + ScopedVulkanCommandBufferAndPool cmds(user_context, ctx.allocator, ctx.queue_family_index); + if (cmds.error_code != halide_error_code_success) { + error(user_context) << "Vulkan: Failed to create command buffer and pool!\n"; + return cmds.error_code; } // begin the command buffer @@ -450,7 +466,7 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid nullptr // pointer to parent command buffer }; - VkResult result = vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info); + VkResult result = vkBeginCommandBuffer(cmds.command_buffer, &command_buffer_begin_info); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkBeginCommandBuffer returned " << vk_get_error_name(result) << "\n"; return halide_error_code_device_buffer_copy_failed; @@ -465,7 +481,7 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid uint64_t dst_offset = device_region->range.head_offset; // enqueue the copy operation, using the allocated buffers - error_code = vk_do_multidimensional_copy(user_context, command_buffer, copy_helper, + error_code = vk_do_multidimensional_copy(user_context, cmds.command_buffer, copy_helper, src_offset, dst_offset, halide_buffer->dimensions, from_host, to_host); @@ -476,7 +492,7 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid } // end the command buffer - result = vkEndCommandBuffer(command_buffer); + result = vkEndCommandBuffer(cmds.command_buffer); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkEndCommandBuffer returned " << vk_get_error_name(result) << "\n"; return halide_error_code_device_buffer_copy_failed; @@ -491,12 +507,12 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid nullptr, // semaphores nullptr, // pipeline stages where semaphore waits occur 1, // how many command buffers to execute - &command_buffer, // the command buffers + &(cmds.command_buffer), // the command buffers 0, // number of semaphores to signal nullptr // the semaphores to signal }; - result = vkQueueSubmit(ctx.queue, 1, &submit_info, 0); + result = vkQueueSubmit(ctx.queue, 1, &submit_info, VK_NULL_HANDLE); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkQueueSubmit returned " << vk_get_error_name(result) << "\n"; return halide_error_code_device_buffer_copy_failed; @@ -516,12 +532,6 @@ WEAK int halide_vulkan_copy_to_device(void *user_context, halide_buffer_t *halid ctx.allocator->reclaim(user_context, staging_region); } - error_code = vk_destroy_command_buffer(user_context, ctx.allocator, ctx.command_pool, command_buffer); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to destroy command buffer!\n"; - return error_code; - } - #ifdef DEBUG_RUNTIME uint64_t t_after = halide_current_time_ns(user_context); debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n"; @@ -609,12 +619,10 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ << " into halide buffer=" << halide_buffer << "\n"; #endif - // create a command buffer - VkCommandBuffer command_buffer; - int error_code = vk_create_command_buffer(user_context, ctx.allocator, ctx.command_pool, &command_buffer); - 
if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to create command buffer!\n"; - return error_code; + ScopedVulkanCommandBufferAndPool cmds(user_context, ctx.allocator, ctx.queue_family_index); + if (cmds.error_code != halide_error_code_success) { + error(user_context) << "Vulkan: Failed to create command buffer and pool!\n"; + return cmds.error_code; } // begin the command buffer @@ -626,7 +634,7 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ nullptr // pointer to parent command buffer }; - VkResult result = vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info); + VkResult result = vkBeginCommandBuffer(cmds.command_buffer, &command_buffer_begin_info); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkBeginCommandBuffer returned " << vk_get_error_name(result) << "\n"; return halide_error_code_device_buffer_copy_failed; @@ -642,10 +650,10 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ uint64_t dst_offset = 0; // enqueue the copy operation, using the allocated buffers - error_code = vk_do_multidimensional_copy(user_context, command_buffer, copy_helper, - src_offset, dst_offset, - halide_buffer->dimensions, - from_host, to_host); + int error_code = vk_do_multidimensional_copy(user_context, cmds.command_buffer, copy_helper, + src_offset, dst_offset, + halide_buffer->dimensions, + from_host, to_host); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: vk_do_multidimensional_copy failed!\n"; @@ -653,7 +661,7 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ } // end the command buffer - result = vkEndCommandBuffer(command_buffer); + result = vkEndCommandBuffer(cmds.command_buffer); if (result != VK_SUCCESS) { error(user_context) << "vkEndCommandBuffer returned " << vk_get_error_name(result) << "\n"; return result; @@ -668,12 +676,12 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ nullptr, // semaphores nullptr, // pipeline stages where semaphore waits occur 1, // how many command buffers to execute - &command_buffer, // the command buffers + &(cmds.command_buffer), // the command buffers 0, // number of semaphores to signal nullptr // the semaphores to signal }; - result = vkQueueSubmit(ctx.queue, 1, &submit_info, 0); + result = vkQueueSubmit(ctx.queue, 1, &submit_info, VK_NULL_HANDLE); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkQueueSubmit returned " << vk_get_error_name(result) << "\n"; return halide_error_code_copy_to_device_failed; @@ -710,7 +718,6 @@ WEAK int halide_vulkan_copy_to_host(void *user_context, halide_buffer_t *halide_ } else { ctx.allocator->reclaim(user_context, staging_region); } - vk_destroy_command_buffer(user_context, ctx.allocator, ctx.command_pool, command_buffer); #ifdef DEBUG_RUNTIME uint64_t t_after = halide_current_time_ns(user_context); @@ -801,7 +808,9 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s // Need to make sure all reads and writes to/from source are complete. 
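[Editor's note, not part of the patch] The hunks above (and the matching ones in halide_vulkan_buffer_copy and halide_vulkan_run further below) replace the explicit vk_create_command_buffer / vk_destroy_command_buffer calls against the cached ctx.command_pool with a scoped `ScopedVulkanCommandBufferAndPool` helper constructed from (user_context, ctx.allocator, ctx.queue_family_index). Its definition is not shown in this part of the patch; the sketch below is only a guess at the shape the call sites assume. The `command_buffer` and `error_code` members mirror the call sites, while taking a raw VkDevice instead of the runtime's allocator and using plain ints instead of halide error codes are simplifications made to keep the sketch self-contained.

```cpp
#include <vulkan/vulkan.h>

// Hypothetical RAII sketch of a per-operation command pool plus command buffer.
// A transient pool is created for the given queue family, one primary command
// buffer is allocated from it, and destroying the pool on scope exit frees the
// buffer along with it.
struct ScopedCommandBufferAndPoolSketch {
    VkDevice device = VK_NULL_HANDLE;
    VkCommandPool command_pool = VK_NULL_HANDLE;
    VkCommandBuffer command_buffer = VK_NULL_HANDLE;
    int error_code = 0;  // stand-in for halide_error_code_success / failure

    ScopedCommandBufferAndPoolSketch(VkDevice dev, uint32_t queue_family_index)
        : device(dev) {
        const VkCommandPoolCreateInfo pool_info = {
            VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO, nullptr,
            VK_COMMAND_POOL_CREATE_TRANSIENT_BIT, queue_family_index};
        if (vkCreateCommandPool(device, &pool_info, nullptr, &command_pool) != VK_SUCCESS) {
            error_code = -1;
            return;
        }
        const VkCommandBufferAllocateInfo alloc_info = {
            VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO, nullptr,
            command_pool, VK_COMMAND_BUFFER_LEVEL_PRIMARY, 1};
        if (vkAllocateCommandBuffers(device, &alloc_info, &command_buffer) != VK_SUCCESS) {
            error_code = -1;
        }
    }

    ~ScopedCommandBufferAndPoolSketch() {
        if (command_pool != VK_NULL_HANDLE) {
            // Destroying the pool also frees any command buffers allocated from it.
            vkDestroyCommandPool(device, command_pool, nullptr);
        }
    }
};
```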
MemoryRequest request = {0}; request.size = src->size_in_bytes(); - request.properties.usage = MemoryUsage::TransferSrc; + + // NOTE: We may re-use this buffer so enable both src and dst + request.properties.usage = MemoryUsage::TransferSrcDst; request.properties.caching = MemoryCaching::UncachedCoherent; request.properties.visibility = MemoryVisibility::DeviceToHost; @@ -823,7 +832,9 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s // Need to make sure all reads and writes to/from destination are complete. MemoryRequest request = {0}; request.size = src->size_in_bytes(); - request.properties.usage = MemoryUsage::TransferSrc; + + // NOTE: We may re-use this buffer so enable both src and dst + request.properties.usage = MemoryUsage::TransferSrcDst; request.properties.caching = MemoryCaching::UncachedCoherent; request.properties.visibility = MemoryVisibility::HostToDevice; @@ -882,11 +893,9 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s VkBuffer *src_device_buffer = reinterpret_cast(src_memory_region->handle); VkBuffer *dst_device_buffer = reinterpret_cast(dst_memory_region->handle); - // create a command buffer - VkCommandBuffer command_buffer; - error_code = vk_create_command_buffer(user_context, ctx.allocator, ctx.command_pool, &command_buffer); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to create command buffer!\n"; + ScopedVulkanCommandBufferAndPool cmds(user_context, ctx.allocator, ctx.queue_family_index); + if (cmds.error_code != halide_error_code_success) { + error(user_context) << "Vulkan: Failed to create command buffer and pool!\n"; if (to_host) { return halide_error_code_copy_to_host_failed; } else { @@ -903,7 +912,7 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s nullptr // pointer to parent command buffer }; - VkResult result = vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info); + VkResult result = vkBeginCommandBuffer(cmds.command_buffer, &command_buffer_begin_info); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: vkBeginCommandBuffer returned " << vk_get_error_name(result) << "\n"; if (to_host) { @@ -928,7 +937,7 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s debug(user_context) << " dst region=" << (void *)dst_memory_region << " buffer=" << (void *)dst_device_buffer << " crop_offset=" << (uint64_t)dst_buffer_region->range.head_offset << " copy_offset=" << dst_offset << "\n"; // enqueue the copy operation, using the allocated buffers - error_code = vk_do_multidimensional_copy(user_context, command_buffer, copy_helper, + error_code = vk_do_multidimensional_copy(user_context, cmds.command_buffer, copy_helper, src_offset, dst_offset, src->dimensions, from_host, to_host); @@ -939,7 +948,7 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s } // end the command buffer - result = vkEndCommandBuffer(command_buffer); + result = vkEndCommandBuffer(cmds.command_buffer); if (result != VK_SUCCESS) { error(user_context) << "vkEndCommandBuffer returned " << vk_get_error_name(result) << "\n"; if (to_host) { @@ -958,12 +967,12 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s nullptr, // semaphores nullptr, // pipeline stages where semaphore waits occur 1, // how many command buffers to execute - &command_buffer, // the command buffers + &(cmds.command_buffer), // the command buffers 0, // number of semaphores to 
signal nullptr // the semaphores to signal }; - result = vkQueueSubmit(ctx.queue, 1, &submit_info, 0); + result = vkQueueSubmit(ctx.queue, 1, &submit_info, VK_NULL_HANDLE); if (result != VK_SUCCESS) { error(user_context) << "vkQueueSubmit returned " << vk_get_error_name(result) << "\n"; return result; @@ -1007,13 +1016,6 @@ WEAK int halide_vulkan_buffer_copy(void *user_context, struct halide_buffer_t *s if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to release staging region allocation!\n"; - return error_code; - } - - error_code = vk_destroy_command_buffer(user_context, ctx.allocator, ctx.command_pool, command_buffer); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to destroy command buffer!\n"; - return error_code; } #ifdef DEBUG_RUNTIME @@ -1090,6 +1092,7 @@ WEAK int halide_vulkan_run(void *user_context, #ifdef DEBUG_RUNTIME debug(user_context) << "halide_vulkan_run (user_context: " << user_context << ", " + << "state_ptr: " << state_ptr << ", " << "entry: " << entry_name << ", " << "blocks: " << blocksX << "x" << blocksY << "x" << blocksZ << ", " << "threads: " << threadsX << "x" << threadsY << "x" << threadsZ << ", " @@ -1147,36 +1150,43 @@ WEAK int halide_vulkan_run(void *user_context, // 1a. Locate the correct entry point from the cache bool found_entry_point = false; uint32_t entry_point_index = 0; - for (uint32_t n = 0; (n < cache_entry->shader_count) && !found_entry_point; ++n) { - if (strcmp(cache_entry->shader_bindings[n].entry_point_name, entry_name) == 0) { - entry_point_index = n; - found_entry_point = true; + VulkanCompiledShaderModule *shader_module = nullptr; + for (uint32_t m = 0; m < cache_entry->module_count; m++) { + VulkanCompiledShaderModule *compiled_shader = cache_entry->compiled_modules[m]; + for (uint32_t n = 0; (n < compiled_shader->shader_count) && !found_entry_point; ++n) { + if (strcmp(compiled_shader->shader_bindings[n].entry_point_name, entry_name) == 0) { + shader_module = compiled_shader; + entry_point_index = n; + found_entry_point = true; + } } } - if (!found_entry_point || (entry_point_index >= cache_entry->shader_count)) { + + if (!found_entry_point || (entry_point_index >= shader_module->shader_count) || (shader_module == nullptr)) { error(user_context) << "Vulkan: Failed to locate shader entry point! Unable to proceed!\n"; return halide_error_code_internal_error; } + debug(user_context) << " found entry point [" - << (entry_point_index + 1) << " of " << cache_entry->shader_count + << (entry_point_index + 1) << " of " << shader_module->shader_count << "] '" << entry_name << "'\n"; // 2. Create objects for execution - if (cache_entry->descriptor_set_layouts == nullptr) { + if (shader_module->descriptor_set_layouts == nullptr) { error(user_context) << "Vulkan: Missing descriptor set layouts! Unable to proceed!\n"; return halide_error_code_internal_error; } int error_code = halide_error_code_success; - if (cache_entry->pipeline_layout == 0) { + if (shader_module->pipeline_layout == VK_NULL_HANDLE) { // 2a. 
Create all descriptor set layouts - for (uint32_t n = 0; n < cache_entry->shader_count; ++n) { - if (((void *)cache_entry->descriptor_set_layouts[n]) == nullptr) { - uint32_t uniform_buffer_count = cache_entry->shader_bindings[n].uniform_buffer_count; - uint32_t storage_buffer_count = cache_entry->shader_bindings[n].storage_buffer_count; - debug(user_context) << " creating descriptor set layout [" << n << "] " << cache_entry->shader_bindings[n].entry_point_name << "\n"; - error_code = vk_create_descriptor_set_layout(user_context, ctx.allocator, uniform_buffer_count, storage_buffer_count, &(cache_entry->descriptor_set_layouts[n])); + for (uint32_t n = 0; n < shader_module->shader_count; ++n) { + if (((void *)shader_module->descriptor_set_layouts[n]) == VK_NULL_HANDLE) { + uint32_t uniform_buffer_count = shader_module->shader_bindings[n].uniform_buffer_count; + uint32_t storage_buffer_count = shader_module->shader_bindings[n].storage_buffer_count; + debug(user_context) << " creating descriptor set layout [" << n << "] " << shader_module->shader_bindings[n].entry_point_name << "\n"; + error_code = vk_create_descriptor_set_layout(user_context, ctx.allocator, uniform_buffer_count, storage_buffer_count, &(shader_module->descriptor_set_layouts[n])); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to create descriptor set layout!\n"; return error_code; @@ -1185,7 +1195,7 @@ WEAK int halide_vulkan_run(void *user_context, } // 2b. Create the pipeline layout - error_code = vk_create_pipeline_layout(user_context, ctx.allocator, cache_entry->shader_count, cache_entry->descriptor_set_layouts, &(cache_entry->pipeline_layout)); + error_code = vk_create_pipeline_layout(user_context, ctx.allocator, shader_module->shader_count, shader_module->descriptor_set_layouts, &(shader_module->pipeline_layout)); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to create pipeline layout!\n"; return error_code; @@ -1201,17 +1211,17 @@ WEAK int halide_vulkan_run(void *user_context, dispatch_data.local_size[1] = threadsY; dispatch_data.local_size[2] = threadsZ; - VulkanShaderBinding *entry_point_binding = (cache_entry->shader_bindings + entry_point_index); + VulkanShaderBinding *entry_point_binding = (shader_module->shader_bindings + entry_point_index); // 2c. Setup the compute pipeline (eg override any specializations for shared mem or workgroup size) - error_code = vk_setup_compute_pipeline(user_context, ctx.allocator, entry_point_binding, &dispatch_data, cache_entry->shader_module, cache_entry->pipeline_layout, &(entry_point_binding->compute_pipeline)); + error_code = vk_setup_compute_pipeline(user_context, ctx.allocator, entry_point_binding, &dispatch_data, shader_module->shader_module, shader_module->pipeline_layout, &(entry_point_binding->compute_pipeline)); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to setup compute pipeline!\n"; return error_code; } // 2d. 
Create a descriptor set - if (entry_point_binding->descriptor_set == 0) { + if (entry_point_binding->descriptor_set == VK_NULL_HANDLE) { // Construct a descriptor pool // @@ -1227,7 +1237,7 @@ WEAK int halide_vulkan_run(void *user_context, } // Create the descriptor set - error_code = vk_create_descriptor_set(user_context, ctx.allocator, cache_entry->descriptor_set_layouts[entry_point_index], entry_point_binding->descriptor_pool, &(entry_point_binding->descriptor_set)); + error_code = vk_create_descriptor_set(user_context, ctx.allocator, shader_module->descriptor_set_layouts[entry_point_index], entry_point_binding->descriptor_pool, &(entry_point_binding->descriptor_set)); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Unable to create shader module ... failed to create descriptor set!\n"; return error_code; @@ -1269,19 +1279,18 @@ WEAK int halide_vulkan_run(void *user_context, return error_code; } - // 4. Create a command buffer from the command pool - VkCommandBuffer command_buffer; - error_code = vk_create_command_buffer(user_context, ctx.allocator, ctx.command_pool, &command_buffer); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to create command buffer!\n"; - return error_code; + // 4. Create a command buffer and pool + ScopedVulkanCommandBufferAndPool cmds(user_context, ctx.allocator, ctx.queue_family_index); + if (cmds.error_code != halide_error_code_success) { + error(user_context) << "Vulkan: Failed to create command buffer and pool!\n"; + return cmds.error_code; } // 5. Fill the command buffer error_code = vk_fill_command_buffer_with_dispatch_call(user_context, - ctx.device, command_buffer, + ctx.device, cmds.command_buffer, entry_point_binding->compute_pipeline, - cache_entry->pipeline_layout, + shader_module->pipeline_layout, entry_point_binding->descriptor_set, entry_point_index, blocksX, blocksY, blocksZ); @@ -1291,7 +1300,7 @@ WEAK int halide_vulkan_run(void *user_context, } // 6. Submit the command buffer to our command queue - error_code = vk_submit_command_buffer(user_context, ctx.queue, command_buffer); + error_code = vk_submit_command_buffer(user_context, ctx.queue, cmds.command_buffer); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to fill submit command buffer!\n"; return error_code; @@ -1304,14 +1313,6 @@ WEAK int halide_vulkan_run(void *user_context, return halide_error_code_generic_error; } - // 8. 
Cleanup - error_code = vk_destroy_command_buffer(user_context, ctx.allocator, ctx.command_pool, command_buffer); - vkResetCommandPool(ctx.device, ctx.command_pool, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to destroy command buffer!\n"; - return error_code; - } - #ifdef DEBUG_RUNTIME debug(user_context) << "halide_vulkan_run: blocks_allocated=" << (uint32_t)ctx.allocator->blocks_allocated() << " " @@ -1386,7 +1387,9 @@ WEAK int halide_vulkan_release_unused_device_allocations(void *user_context) { } // collect all unused allocations - ctx.allocator->collect(user_context); + if (ctx.allocator) { + ctx.allocator->collect(user_context); + } return halide_error_code_success; } @@ -1398,16 +1401,19 @@ WEAK __attribute__((constructor)) void register_vulkan_allocation_pool() { } WEAK __attribute__((destructor)) void halide_vulkan_cleanup() { - // FIXME: When the VK_LAYER_KHRONOS_validation is intercepting calls to the API, it will - // cause a segfault if it's invoked inside the destructor since it uses it's own global - // state to track object usage which is no longer valid once this call is invoked. - // Calling this destructor with the validator hooks in place will cause an uncaught - // exception for an uninitialized mutex lock. We can avoid the crash on exit by \ - // bypassing the device release call and leak (!!!!) - // ISSUE: https://github.com/halide/Halide/issues/8290 - const char *layer_names = vk_get_layer_names_internal(nullptr); - bool has_validation_layer = strstr(layer_names, "VK_LAYER_KHRONOS_validation"); - if (!has_validation_layer) { + // NOTE: In some cases, we've observed the NVIDIA driver causing a segfault + // at process exit. It appears to be triggered by running multiple + // processes that use the Vulkan API, whereupon, one of the libs in + // their driver stack may crash inside the finalizer. Unfortunately, + // any attempt to avoid it may also crash, since the function pointers + // obtained from the Vulkan loader appear to be invalid. + // + // https://github.com/halide/Halide/issues/8497 + // + // So, we don't do any special handling here ... just clean up like + // the other runtimes do. 
+ // + if (halide_vulkan_is_initialized()) { halide_vulkan_device_release(nullptr); } } diff --git a/src/runtime/vulkan_context.h b/src/runtime/vulkan_context.h index f95cb441fb4a..9e82b9fb5b64 100644 --- a/src/runtime/vulkan_context.h +++ b/src/runtime/vulkan_context.h @@ -22,15 +22,15 @@ namespace Vulkan { halide_vulkan_memory_allocator *WEAK cached_allocator = nullptr; // Cached instance related handles for device resources -VkInstance WEAK cached_instance = nullptr; -VkDevice WEAK cached_device = nullptr; -VkCommandPool WEAK cached_command_pool = VkInvalidCommandPool; -VkQueue WEAK cached_queue = nullptr; -VkPhysicalDevice WEAK cached_physical_device = nullptr; +VkInstance WEAK cached_instance = VK_NULL_HANDLE; +VkDevice WEAK cached_device = VK_NULL_HANDLE; +VkQueue WEAK cached_queue = VK_NULL_HANDLE; +VkPhysicalDevice WEAK cached_physical_device = VK_NULL_HANDLE; uint32_t WEAK cached_queue_family_index = 0; +VkDebugUtilsMessengerEXT WEAK cached_messenger = VK_NULL_HANDLE; // A Vulkan context/queue/synchronization lock defined in this module with weak linkage -volatile ScopedSpinLock::AtomicFlag WEAK thread_lock = 0; +WEAK halide_mutex thread_lock; // -------------------------------------------------------------------------- @@ -40,12 +40,12 @@ class VulkanContext { public: VulkanMemoryAllocator *allocator = nullptr; - VkInstance instance = nullptr; - VkDevice device = nullptr; - VkCommandPool command_pool = VkInvalidCommandPool; - VkPhysicalDevice physical_device = nullptr; - VkQueue queue = nullptr; + VkInstance instance = VK_NULL_HANDLE; + VkDevice device = VK_NULL_HANDLE; + VkPhysicalDevice physical_device = VK_NULL_HANDLE; + VkQueue queue = VK_NULL_HANDLE; uint32_t queue_family_index = 0; // used for operations requiring queue family + VkDebugUtilsMessengerEXT messenger = VK_NULL_HANDLE; halide_error_code_t error = halide_error_code_success; HALIDE_ALWAYS_INLINE explicit VulkanContext(void *user_context) @@ -53,21 +53,20 @@ class VulkanContext { int result = halide_vulkan_acquire_context(user_context, reinterpret_cast(&allocator), - &instance, &device, &physical_device, &command_pool, &queue, &queue_family_index); + &instance, &device, &physical_device, &queue, &queue_family_index, &messenger); if (result != halide_error_code_success) { error = halide_error_code_device_interface_no_device; halide_error_no_device_interface(user_context); } halide_debug_assert(user_context, allocator != nullptr); - halide_debug_assert(user_context, instance != nullptr); - halide_debug_assert(user_context, device != nullptr); - halide_debug_assert(user_context, command_pool != 0); - halide_debug_assert(user_context, queue != nullptr); - halide_debug_assert(user_context, physical_device != nullptr); + halide_debug_assert(user_context, instance != VK_NULL_HANDLE); + halide_debug_assert(user_context, device != VK_NULL_HANDLE); + halide_debug_assert(user_context, queue != VK_NULL_HANDLE); + halide_debug_assert(user_context, physical_device != VK_NULL_HANDLE); } HALIDE_ALWAYS_INLINE ~VulkanContext() { - halide_vulkan_release_context(user_context, instance, device, queue); + halide_vulkan_release_context(user_context, instance, device, queue, messenger); } // For now, this is always nullptr @@ -83,9 +82,9 @@ namespace { int vk_find_compute_capability(void *user_context, int *major, int *minor) { debug(user_context) << " vk_find_compute_capability (user_context: " << user_context << ")\n"; - VkInstance instance = nullptr; - VkDevice device = nullptr; - VkPhysicalDevice physical_device = nullptr; + 
VkInstance instance = VK_NULL_HANDLE; + VkDevice device = VK_NULL_HANDLE; + VkPhysicalDevice physical_device = VK_NULL_HANDLE; uint32_t queue_family_index = 0; if (!lib_vulkan) { @@ -121,15 +120,6 @@ int vk_find_compute_capability(void *user_context, int *major, int *minor) { return 0; } - if (vkCreateDevice == nullptr) { - vk_load_vulkan_functions(user_context, instance); - if (vkCreateDevice == nullptr) { - debug(user_context) << " no valid vulkan runtime was found ...\n"; - *major = *minor = 0; - return halide_error_code_success; - } - } - status = vk_select_device_for_context(user_context, &instance, &device, &physical_device, &queue_family_index); if (status != halide_error_code_success) { debug(user_context) << " no valid vulkan device was found ...\n"; @@ -155,6 +145,9 @@ int vk_create_instance(void *user_context, const StringTable &requested_layers, StringTable required_instance_extensions; vk_get_required_instance_extensions(user_context, required_instance_extensions); + StringTable optional_instance_extensions; + vk_get_optional_instance_extensions(user_context, optional_instance_extensions); + StringTable supported_instance_extensions; vk_get_supported_instance_extensions(user_context, supported_instance_extensions); @@ -166,6 +159,15 @@ int vk_create_instance(void *user_context, const StringTable &requested_layers, debug(user_context) << " extension: " << required_instance_extensions[n] << "\n"; } + // enable all available optional extensions + debug(user_context) << " checking for " << (uint32_t)optional_instance_extensions.size() << " optional extensions for instance ...\n"; + for (uint32_t n = 0; n < optional_instance_extensions.size(); ++n) { + if (supported_instance_extensions.contains(optional_instance_extensions[n])) { + debug(user_context) << " optional extension: " << optional_instance_extensions[n] << "\n"; + required_instance_extensions.append(user_context, optional_instance_extensions[n]); + } + } + // If we're running under Molten VK, we must enable the portability extension and create flags // to allow non-physical devices that are emulated to appear in the device list. 
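    // (On MoltenVK that typically means appending VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME
    //  to the enabled extensions and setting the portability-enumeration flag on the instance.
    //  A minimal sketch of that pattern, independent of the helpers in this file and shown
    //  only for illustration:
    //
    //      VkInstanceCreateInfo create_info = {VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO};
    //      create_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
    //      // ... also list VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME in
    //      //     ppEnabledExtensionNames before calling vkCreateInstance() ...
    //  )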
uint32_t create_flags = 0; @@ -199,12 +201,19 @@ int vk_create_instance(void *user_context, const StringTable &requested_layers, return halide_error_code_device_interface_no_device; } + vk_load_vulkan_instance_functions(user_context, *instance); + if (vkCreateDevice == nullptr) { + error(user_context) << "Vulkan: Failed to resolve instance API methods to create device!\n"; + return halide_error_code_symbol_not_found; + } + return halide_error_code_success; } int vk_destroy_instance(void *user_context, VkInstance instance, const VkAllocationCallbacks *alloc_callbacks) { - debug(user_context) << " vk_destroy_instance (user_context: " << user_context << ")\n"; + debug(user_context) << " vk_destroy_instance (user_context: " << user_context << ", device: " << (void *)instance << ", alloc_callbacks: " << (void *)alloc_callbacks << ")\n"; vkDestroyInstance(instance, alloc_callbacks); + vk_unload_vulkan_instance_functions(user_context); return halide_error_code_success; } @@ -230,7 +239,7 @@ int vk_select_device_for_context(void *user_context, BlockStorage device_query_storage(user_context, device_query_storage_config); device_query_storage.resize(user_context, device_count); - VkPhysicalDevice chosen_device = nullptr; + VkPhysicalDevice chosen_device = VK_NULL_HANDLE; VkPhysicalDevice *avail_devices = (VkPhysicalDevice *)(device_query_storage.data()); if (avail_devices == nullptr) { debug(user_context) << "Vulkan: Out of system memory!\n"; @@ -247,7 +256,7 @@ int vk_select_device_for_context(void *user_context, // try to find a matching device that supports compute. uint32_t queue_family = 0; - for (uint32_t i = 0; (chosen_device == nullptr) && (i < device_count); i++) { + for (uint32_t i = 0; (chosen_device == VK_NULL_HANDLE) && (i < device_count); i++) { VkPhysicalDeviceProperties properties; vkGetPhysicalDeviceProperties(avail_devices[i], &properties); debug(user_context) << "Vulkan: Checking device #" << i << "='" << properties.deviceName << "'\n"; @@ -439,14 +448,27 @@ int vk_create_device(void *user_context, const StringTable &requested_layers, Vk return halide_error_code_device_interface_no_device; } - vkGetDeviceQueue(cached_device, *queue_family_index, 0, queue); + vk_load_vulkan_device_functions(user_context, *device); + if (vkAllocateMemory == nullptr) { + error(user_context) << "Vulkan: Failed to resolve device API methods for driver!\n"; + return halide_error_code_symbol_not_found; + } + + vkGetDeviceQueue(*device, *queue_family_index, 0, queue); + return halide_error_code_success; +} + +int vk_destroy_device(void *user_context, VkDevice device, const VkAllocationCallbacks *alloc_callbacks) { + debug(user_context) << " vk_destroy_device (user_context: " << user_context << ", device: " << (void *)device << ", alloc_callbacks: " << (void *)alloc_callbacks << ")\n"; + vkDestroyDevice(device, alloc_callbacks); + vk_unload_vulkan_device_functions(user_context); return halide_error_code_success; } // Initializes the context (used by the default implementation of halide_acquire_context) int vk_create_context(void *user_context, VulkanMemoryAllocator **allocator, VkInstance *instance, VkDevice *device, VkPhysicalDevice *physical_device, - VkCommandPool *command_pool, VkQueue *queue, uint32_t *queue_family_index) { + VkQueue *queue, uint32_t *queue_family_index, VkDebugUtilsMessengerEXT *messenger) { debug(user_context) << " vk_create_context (user_context: " << user_context << ")\n"; @@ -472,14 +494,6 @@ int vk_create_context(void *user_context, VulkanMemoryAllocator **allocator, return 
error_code; } - if (vkCreateDevice == nullptr) { - vk_load_vulkan_functions(user_context, *instance); - if (vkCreateDevice == nullptr) { - error(user_context) << "Vulkan: Failed to resolve API library methods to create device!\n"; - return halide_error_code_symbol_not_found; - } - } - error_code = vk_select_device_for_context(user_context, instance, device, physical_device, queue_family_index); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to select device for context!\n"; @@ -498,12 +512,7 @@ int vk_create_context(void *user_context, VulkanMemoryAllocator **allocator, return halide_error_code_generic_error; } - error_code = vk_create_command_pool(user_context, *allocator, *queue_family_index, command_pool); - if (error_code != halide_error_code_success) { - error(user_context) << "Vulkan: Failed to create command pool for context!\n"; - return error_code; - } - + vk_create_debug_utils_messenger(user_context, *instance, *allocator, messenger); return halide_error_code_success; } @@ -511,30 +520,101 @@ int vk_create_context(void *user_context, VulkanMemoryAllocator **allocator, // NOTE: This should be called inside an acquire_context/release_context scope int vk_destroy_context(void *user_context, VulkanMemoryAllocator *allocator, VkInstance instance, VkDevice device, VkPhysicalDevice physical_device, - VkCommandPool command_pool, VkQueue queue) { + VkQueue queue, VkDebugUtilsMessengerEXT messenger) { debug(user_context) << "vk_destroy_context (user_context: " << user_context << ")\n"; - if (device != nullptr) { - vkDeviceWaitIdle(device); + if (vkCreateInstance == nullptr) { + return halide_error_code_success; } - if ((command_pool != VkInvalidCommandPool) && (allocator != nullptr)) { - vk_destroy_command_pool(user_context, allocator, command_pool); + + if (queue != VK_NULL_HANDLE) { + VkResult result = vkQueueWaitIdle(queue); + if (result != VK_SUCCESS) { + error(user_context) << "Vulkan: vkQueueWaitIdle returned " << vk_get_error_name(result) << "\n"; + return halide_error_code_generic_error; + } + } + if (device != VK_NULL_HANDLE) { + VkResult result = vkDeviceWaitIdle(device); + if (result != VK_SUCCESS) { + error(user_context) << "Vulkan: vkDeviceWaitIdle returned " << vk_get_error_name(result) << "\n"; + return halide_error_code_generic_error; + } + } + + if (allocator != nullptr) { vk_destroy_shader_modules(user_context, allocator); vk_destroy_memory_allocator(user_context, allocator); + vk_destroy_debug_utils_messenger(user_context, instance, allocator, messenger); } - if (device != nullptr) { - vkDestroyDevice(device, nullptr); + + const VkAllocationCallbacks *alloc_callbacks = halide_vulkan_get_allocation_callbacks(user_context); + if (device != VK_NULL_HANDLE) { + vk_destroy_device(user_context, device, alloc_callbacks); } - if (instance != nullptr) { - vkDestroyInstance(instance, nullptr); + if (instance != VK_NULL_HANDLE) { + vk_destroy_instance(user_context, instance, alloc_callbacks); } return halide_error_code_success; } // -------------------------------------------------------------------------- +VKAPI_ATTR VkBool32 VKAPI_CALL vk_debug_utils_messenger_callback( + VkDebugUtilsMessageSeverityFlagBitsEXT message_severity, + VkDebugUtilsMessageTypeFlagsEXT message_type, + const VkDebugUtilsMessengerCallbackDataEXT *callback_data, + void *user_data) { + if (message_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT) { + print(user_data) << "Vulkan [WARNING]: (user_context=" << user_data << ", " + << "id=" << 
callback_data->messageIdNumber << ", " + << "name:" << callback_data->pMessageIdName << ") " + << callback_data->pMessage << "\n"; + } else if (message_severity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) { + print(user_data) << "Vulkan [ERROR]: (user_context=" << user_data << ", " + << "id=" << callback_data->messageIdNumber << ", " + << "name:" << callback_data->pMessageIdName << ") " + << callback_data->pMessage << "\n"; + } else { + + debug(user_data) << "Vulkan [DEBUG]: (user_context=" << user_data << ", " + << "id=" << callback_data->messageIdNumber << ", " + << "name:" << callback_data->pMessageIdName << ") " + << callback_data->pMessage << "\n"; + } + return VK_FALSE; +} + +VkResult vk_create_debug_utils_messenger(void *user_context, VkInstance instance, VulkanMemoryAllocator *allocator, VkDebugUtilsMessengerEXT *messenger) { + PFN_vkCreateDebugUtilsMessengerEXT func = (PFN_vkCreateDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkCreateDebugUtilsMessengerEXT"); + if (func != nullptr) { + debug(user_context) << "Vulkan: Registering Debug Utils Messenger ... \n"; + VkDebugUtilsMessengerCreateInfoEXT create_info{}; + create_info.flags = 0; + create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT; + create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT; + create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT; + create_info.pfnUserCallback = vk_debug_utils_messenger_callback; + create_info.pUserData = user_context; + return func(instance, &create_info, allocator->callbacks(), messenger); + } else { + debug(user_context) << "Vulkan: Debug Utils Messenger extension unavailable!\n"; + return VK_ERROR_EXTENSION_NOT_PRESENT; + } +} + +void vk_destroy_debug_utils_messenger(void *user_context, VkInstance instance, VulkanMemoryAllocator *allocator, VkDebugUtilsMessengerEXT messenger) { + PFN_vkDestroyDebugUtilsMessengerEXT func = (PFN_vkDestroyDebugUtilsMessengerEXT)vkGetInstanceProcAddr(instance, "vkDestroyDebugUtilsMessengerEXT"); + if (func != nullptr) { + debug(user_context) << "Vulkan: Destroying Debug Utils Messenger ...\n"; + func(instance, messenger, allocator->callbacks()); + } +} + +// -------------------------------------------------------------------------- + } // namespace } // namespace Vulkan } // namespace Internal diff --git a/src/runtime/vulkan_extensions.h b/src/runtime/vulkan_extensions.h index 058a91605001..c2e4ea42ff1e 100644 --- a/src/runtime/vulkan_extensions.h +++ b/src/runtime/vulkan_extensions.h @@ -137,12 +137,19 @@ uint32_t vk_get_requested_layers(void *user_context, StringTable &layer_table) { } uint32_t vk_get_required_instance_extensions(void *user_context, StringTable &ext_table) { - const char *required_ext_table[] = {"VK_KHR_get_physical_device_properties2"}; + const char *required_ext_table[] = {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME}; const uint32_t required_ext_count = sizeof(required_ext_table) / sizeof(required_ext_table[0]); ext_table.fill(user_context, (const char **)required_ext_table, required_ext_count); return required_ext_count; } +uint32_t vk_get_optional_instance_extensions(void *user_context, StringTable &ext_table) { + const char *optional_ext_table[] = {VK_EXT_DEBUG_UTILS_EXTENSION_NAME}; + const uint32_t optional_ext_count = sizeof(optional_ext_table) / 
sizeof(optional_ext_table[0]); + ext_table.fill(user_context, (const char **)optional_ext_table, optional_ext_count); + return optional_ext_count; +} + uint32_t vk_get_supported_instance_extensions(void *user_context, StringTable &ext_table) { PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties) @@ -185,7 +192,9 @@ uint32_t vk_get_supported_instance_extensions(void *user_context, StringTable &e } uint32_t vk_get_required_device_extensions(void *user_context, StringTable &ext_table) { - const char *required_ext_table[] = {"VK_KHR_8bit_storage", "VK_KHR_storage_buffer_storage_class"}; + const char *required_ext_table[] = { + VK_KHR_8BIT_STORAGE_EXTENSION_NAME, + VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME}; const uint32_t required_ext_count = sizeof(required_ext_table) / sizeof(required_ext_table[0]); ext_table.fill(user_context, (const char **)required_ext_table, required_ext_count); return required_ext_count; @@ -194,9 +203,10 @@ uint32_t vk_get_required_device_extensions(void *user_context, StringTable &ext_ uint32_t vk_get_optional_device_extensions(void *user_context, StringTable &ext_table) { const char *optional_ext_table[] = { "VK_KHR_portability_subset", //< necessary for running under Molten (aka Vulkan on Mac) - "VK_KHR_16bit_storage", - "VK_KHR_shader_float16_int8", - "VK_KHR_shader_float_controls"}; + VK_KHR_MAINTENANCE_5_EXTENSION_NAME, + VK_KHR_16BIT_STORAGE_EXTENSION_NAME, + VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME, + VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME}; const uint32_t optional_ext_count = sizeof(optional_ext_table) / sizeof(optional_ext_table[0]); ext_table.fill(user_context, (const char **)optional_ext_table, optional_ext_count); return optional_ext_count; diff --git a/src/runtime/vulkan_functions.h b/src/runtime/vulkan_functions.h index 22cb28d750e5..90938896f4cc 100644 --- a/src/runtime/vulkan_functions.h +++ b/src/runtime/vulkan_functions.h @@ -1,47 +1,234 @@ -// NOTE: vkCreateInstance and vkGetInstanceProcAddr are defined in the loader library and will be resolved seperately -// The rest of these are resolved via vkGetInstanceProcAddr which the loader exports and maps to the driver implementation -VULKAN_FN(vkDestroyInstance) +// NOTE: There are three sets of functions that each have to be resolved separately. +// 1. Loader Functions: Used to query and create an instance to use +// 2. Instance Functions: Used to query and create a device to use +// 3. 
Device Functions: Used to interact directly with the device driver +#ifdef HL_USE_VULKAN_LOADER_FNS +// v1.0+ +VULKAN_FN(vkCreateInstance) +VULKAN_FN(vkGetInstanceProcAddr) +VULKAN_FN(vkEnumerateInstanceExtensionProperties) +VULKAN_FN(vkEnumerateInstanceLayerProperties) +// v1.1+ +VULKAN_FN(vkEnumerateInstanceVersion) +#endif +#ifdef HL_USE_VULKAN_INSTANCE_FNS +// v1.0+ VULKAN_FN(vkCreateDevice) -VULKAN_FN(vkDeviceWaitIdle) -VULKAN_FN(vkDestroyDevice) -VULKAN_FN(vkGetDeviceQueue) -VULKAN_FN(vkCreateBuffer) -VULKAN_FN(vkDestroyBuffer) -VULKAN_FN(vkDestroyCommandPool) -VULKAN_FN(vkDestroyDescriptorPool) -VULKAN_FN(vkDestroyDescriptorSetLayout) -VULKAN_FN(vkDestroyPipeline) -VULKAN_FN(vkDestroyPipelineLayout) -VULKAN_FN(vkDestroyShaderModule) +VULKAN_FN(vkDestroyInstance) VULKAN_FN(vkEnumerateDeviceExtensionProperties) +VULKAN_FN(vkEnumerateDeviceLayerProperties) VULKAN_FN(vkEnumeratePhysicalDevices) +VULKAN_FN(vkGetDeviceProcAddr) VULKAN_FN(vkGetPhysicalDeviceFeatures) +VULKAN_FN(vkGetPhysicalDeviceFormatProperties) +// VULKAN_FN(vkGetPhysicalDeviceImageFormatProperties) +VULKAN_FN(vkGetPhysicalDeviceMemoryProperties) VULKAN_FN(vkGetPhysicalDeviceProperties) VULKAN_FN(vkGetPhysicalDeviceQueueFamilyProperties) -VULKAN_FN(vkGetBufferMemoryRequirements) -VULKAN_FN(vkCreateShaderModule) -VULKAN_FN(vkCreateDescriptorSetLayout) -VULKAN_FN(vkCreatePipelineLayout) -VULKAN_FN(vkCreateComputePipelines) -VULKAN_FN(vkCreateDescriptorPool) +// VULKAN_FN(vkGetPhysicalDeviceSparseImageFormatProperties) +// v1.1+ +VULKAN_FN(vkEnumeratePhysicalDeviceGroups) +VULKAN_FN(vkGetPhysicalDeviceExternalBufferProperties) +VULKAN_FN(vkGetPhysicalDeviceExternalFenceProperties) +VULKAN_FN(vkGetPhysicalDeviceExternalSemaphoreProperties) +VULKAN_FN(vkGetPhysicalDeviceFeatures2) +VULKAN_FN(vkGetPhysicalDeviceFormatProperties2) +// VULKAN_FN(vkGetPhysicalDeviceImageFormatProperties2) +VULKAN_FN(vkGetPhysicalDeviceMemoryProperties2) +VULKAN_FN(vkGetPhysicalDeviceProperties2) +VULKAN_FN(vkGetPhysicalDeviceQueueFamilyProperties2) +// VULKAN_FN(vkGetPhysicalDeviceSparseImageFormatProperties2) +// v1.3+ +VULKAN_FN(vkGetPhysicalDeviceToolProperties) +#endif +#ifdef HL_USE_VULKAN_DEVICE_FNS +// v1.0+ +VULKAN_FN(vkAllocateCommandBuffers) VULKAN_FN(vkAllocateDescriptorSets) -VULKAN_FN(vkGetPhysicalDeviceMemoryProperties) VULKAN_FN(vkAllocateMemory) -VULKAN_FN(vkBindBufferMemory) -VULKAN_FN(vkMapMemory) -VULKAN_FN(vkUnmapMemory) -VULKAN_FN(vkFreeMemory) -VULKAN_FN(vkUpdateDescriptorSets) -VULKAN_FN(vkCreateCommandPool) -VULKAN_FN(vkResetCommandPool) -VULKAN_FN(vkAllocateCommandBuffers) -VULKAN_FN(vkFreeCommandBuffers) VULKAN_FN(vkBeginCommandBuffer) -VULKAN_FN(vkCmdBindPipeline) +VULKAN_FN(vkBindBufferMemory) +// VULKAN_FN(vkBindImageMemory) +VULKAN_FN(vkCmdBeginQuery) +// VULKAN_FN(vkCmdBeginRenderPass) VULKAN_FN(vkCmdBindDescriptorSets) +// VULKAN_FN(vkCmdBindIndexBuffer) +VULKAN_FN(vkCmdBindPipeline) +// VULKAN_FN(vkCmdBindVertexBuffers) +// VULKAN_FN(vkCmdBlitImage) +// VULKAN_FN(vkCmdClearAttachments) +// VULKAN_FN(vkCmdClearColorImage) +// VULKAN_FN(vkCmdClearDepthStencilImage) +VULKAN_FN(vkCmdCopyBuffer) +// VULKAN_FN(vkCmdCopyBufferToImage) +// VULKAN_FN(vkCmdCopyImage) +// VULKAN_FN(vkCmdCopyImageToBuffer) +VULKAN_FN(vkCmdCopyQueryPoolResults) VULKAN_FN(vkCmdDispatch) +VULKAN_FN(vkCmdDispatchIndirect) +// VULKAN_FN(vkCmdDraw) +// VULKAN_FN(vkCmdDrawIndexed) +// VULKAN_FN(vkCmdDrawIndexedIndirect) +// VULKAN_FN(vkCmdDrawIndirect) +VULKAN_FN(vkCmdEndQuery) +// VULKAN_FN(vkCmdEndRenderPass) +VULKAN_FN(vkCmdExecuteCommands) 
+VULKAN_FN(vkCmdFillBuffer) +// VULKAN_FN(vkCmdNextSubpass) +VULKAN_FN(vkCmdPipelineBarrier) +VULKAN_FN(vkCmdPushConstants) +VULKAN_FN(vkCmdResetEvent) +VULKAN_FN(vkCmdResetQueryPool) +// VULKAN_FN(vkCmdResolveImage) +VULKAN_FN(vkCmdSetBlendConstants) +VULKAN_FN(vkCmdSetDepthBias) +VULKAN_FN(vkCmdSetDepthBounds) +VULKAN_FN(vkCmdSetEvent) +// VULKAN_FN(vkCmdSetLineWidth) +// VULKAN_FN(vkCmdSetScissor) +// VULKAN_FN(vkCmdSetStencilCompareMask) +// VULKAN_FN(vkCmdSetStencilReference) +// VULKAN_FN(vkCmdSetStencilWriteMask) +// VULKAN_FN(vkCmdSetViewport) +VULKAN_FN(vkCmdUpdateBuffer) +VULKAN_FN(vkCmdWaitEvents) +VULKAN_FN(vkCmdWriteTimestamp) +VULKAN_FN(vkCreateBuffer) +VULKAN_FN(vkCreateBufferView) +VULKAN_FN(vkCreateCommandPool) +VULKAN_FN(vkCreateComputePipelines) +VULKAN_FN(vkCreateDescriptorPool) +VULKAN_FN(vkCreateDescriptorSetLayout) +VULKAN_FN(vkCreateEvent) +VULKAN_FN(vkCreateFence) +// VULKAN_FN(vkCreateFramebuffer) +// VULKAN_FN(vkCreateGraphicsPipelines) +// VULKAN_FN(vkCreateImage) +// VULKAN_FN(vkCreateImageView) +VULKAN_FN(vkCreatePipelineCache) +VULKAN_FN(vkCreatePipelineLayout) +VULKAN_FN(vkCreateQueryPool) +// VULKAN_FN(vkCreateRenderPass) +// VULKAN_FN(vkCreateSampler) +VULKAN_FN(vkCreateSemaphore) +VULKAN_FN(vkCreateShaderModule) +VULKAN_FN(vkDestroyBuffer) +VULKAN_FN(vkDestroyBufferView) +VULKAN_FN(vkDestroyCommandPool) +VULKAN_FN(vkDestroyDescriptorPool) +VULKAN_FN(vkDestroyDescriptorSetLayout) +VULKAN_FN(vkDestroyDevice) +VULKAN_FN(vkDestroyEvent) +VULKAN_FN(vkDestroyFence) +// VULKAN_FN(vkDestroyFramebuffer) +// VULKAN_FN(vkDestroyImage) +// VULKAN_FN(vkDestroyImageView) +VULKAN_FN(vkDestroyPipeline) +VULKAN_FN(vkDestroyPipelineCache) +VULKAN_FN(vkDestroyPipelineLayout) +VULKAN_FN(vkDestroyQueryPool) +// VULKAN_FN(vkDestroyRenderPass) +// VULKAN_FN(vkDestroySampler) +VULKAN_FN(vkDestroySemaphore) +VULKAN_FN(vkDestroyShaderModule) +VULKAN_FN(vkDeviceWaitIdle) +VULKAN_FN(vkEndCommandBuffer) +VULKAN_FN(vkFlushMappedMemoryRanges) +VULKAN_FN(vkFreeCommandBuffers) +VULKAN_FN(vkFreeDescriptorSets) +VULKAN_FN(vkFreeMemory) +VULKAN_FN(vkGetBufferMemoryRequirements) +VULKAN_FN(vkGetDeviceMemoryCommitment) +VULKAN_FN(vkGetDeviceQueue) +VULKAN_FN(vkGetEventStatus) +VULKAN_FN(vkGetFenceStatus) +// VULKAN_FN(vkGetImageMemoryRequirements) +// VULKAN_FN(vkGetImageSparseMemoryRequirements) +// VULKAN_FN(vkGetImageSubresourceLayout) +VULKAN_FN(vkGetPipelineCacheData) +VULKAN_FN(vkGetQueryPoolResults) +// VULKAN_FN(vkGetRenderAreaGranularity) +VULKAN_FN(vkInvalidateMappedMemoryRanges) +VULKAN_FN(vkMapMemory) +VULKAN_FN(vkMergePipelineCaches) +// VULKAN_FN(vkQueueBindSparse) VULKAN_FN(vkQueueSubmit) VULKAN_FN(vkQueueWaitIdle) -VULKAN_FN(vkEndCommandBuffer) -VULKAN_FN(vkCmdFillBuffer) -VULKAN_FN(vkCmdCopyBuffer) \ No newline at end of file +VULKAN_FN(vkResetCommandBuffer) +VULKAN_FN(vkResetCommandPool) +VULKAN_FN(vkResetDescriptorPool) +VULKAN_FN(vkResetEvent) +VULKAN_FN(vkResetFences) +VULKAN_FN(vkSetEvent) +VULKAN_FN(vkUnmapMemory) +VULKAN_FN(vkUpdateDescriptorSets) +VULKAN_FN(vkWaitForFences) +// v1.1+ +VULKAN_FN(vkBindBufferMemory2) +// VULKAN_FN(vkBindImageMemory2) +VULKAN_FN(vkCmdDispatchBase) +VULKAN_FN(vkCmdSetDeviceMask) +VULKAN_FN(vkCreateDescriptorUpdateTemplate) +// VULKAN_FN(vkCreateSamplerYcbcrConversion) +VULKAN_FN(vkDestroyDescriptorUpdateTemplate) +// VULKAN_FN(vkDestroySamplerYcbcrConversion) +VULKAN_FN(vkGetBufferMemoryRequirements2) +VULKAN_FN(vkGetDescriptorSetLayoutSupport) +VULKAN_FN(vkGetDeviceGroupPeerMemoryFeatures) +VULKAN_FN(vkGetDeviceQueue2) +// 
VULKAN_FN(vkGetImageMemoryRequirements2) +// VULKAN_FN(vkGetImageSparseMemoryRequirements2) +VULKAN_FN(vkTrimCommandPool) +VULKAN_FN(vkUpdateDescriptorSetWithTemplate) +// v1.2+ +// VULKAN_FN(vkCmdBeginRenderPass2) +// VULKAN_FN(vkCmdDrawIndexedIndirectCount) +// VULKAN_FN(vkCmdDrawIndirectCount) +// VULKAN_FN(vkCmdEndRenderPass2) +// VULKAN_FN(vkCmdNextSubpass2) +// VULKAN_FN(vkCreateRenderPass2) +VULKAN_FN(vkGetBufferDeviceAddress) +VULKAN_FN(vkGetBufferOpaqueCaptureAddress) +VULKAN_FN(vkGetDeviceMemoryOpaqueCaptureAddress) +VULKAN_FN(vkGetSemaphoreCounterValue) +VULKAN_FN(vkResetQueryPool) +VULKAN_FN(vkSignalSemaphore) +VULKAN_FN(vkWaitSemaphores) +// v1.3+ +// VULKAN_FN(vkCmdBeginRendering) +// VULKAN_FN(vkCmdBindVertexBuffers2) +// VULKAN_FN(vkCmdBlitImage2) +VULKAN_FN(vkCmdCopyBuffer2) +// VULKAN_FN(vkCmdCopyBufferToImage2) +// VULKAN_FN(vkCmdCopyImage2) +// VULKAN_FN(vkCmdCopyImageToBuffer2) +// VULKAN_FN(vkCmdEndRendering) +VULKAN_FN(vkCmdPipelineBarrier2) +VULKAN_FN(vkCmdResetEvent2) +// VULKAN_FN(vkCmdResolveImage2) +// VULKAN_FN(vkCmdSetCullMode) +// VULKAN_FN(vkCmdSetDepthBiasEnable) +// VULKAN_FN(vkCmdSetDepthBoundsTestEnable) +// VULKAN_FN(vkCmdSetDepthCompareOp) +// VULKAN_FN(vkCmdSetDepthTestEnable) +// VULKAN_FN(vkCmdSetDepthWriteEnable) +VULKAN_FN(vkCmdSetEvent2) +// VULKAN_FN(vkCmdSetFrontFace) +// VULKAN_FN(vkCmdSetPrimitiveRestartEnable) +// VULKAN_FN(vkCmdSetPrimitiveTopology) +// VULKAN_FN(vkCmdSetRasterizerDiscardEnable) +// VULKAN_FN(vkCmdSetScissorWithCount) +// VULKAN_FN(vkCmdSetStencilOp) +// VULKAN_FN(vkCmdSetStencilTestEnable) +// VULKAN_FN(vkCmdSetViewportWithCount) +VULKAN_FN(vkCmdWaitEvents2) +VULKAN_FN(vkCmdWriteTimestamp2) +VULKAN_FN(vkCreatePrivateDataSlot) +VULKAN_FN(vkDestroyPrivateDataSlot) +VULKAN_FN(vkGetDeviceBufferMemoryRequirements) +// VULKAN_FN(vkGetDeviceImageMemoryRequirements) +// VULKAN_FN(vkGetDeviceImageSparseMemoryRequirements) +VULKAN_FN(vkGetPrivateData) +VULKAN_FN(vkQueueSubmit2) +VULKAN_FN(vkSetPrivateData) +#endif \ No newline at end of file diff --git a/src/runtime/vulkan_interface.h b/src/runtime/vulkan_interface.h index d7ff99c860ea..6dd6888fb54e 100644 --- a/src/runtime/vulkan_interface.h +++ b/src/runtime/vulkan_interface.h @@ -7,13 +7,6 @@ // Vulkan Specific Definitions // -------------------------------------------------------------------------- -// Vulkan API version identifier macro -#define VK_MAKE_API_VERSION(variant, major, minor, patch) \ - ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch))) - -// Vulkan API version 1.0.0 -#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0) // Patch version should always be set to 0 - // Environment variable string delimiter #ifdef WINDOWS #define HL_VK_ENV_DELIM ";" @@ -24,7 +17,7 @@ // Prototypes for the subset of the Vulkan API we need #define VK_NO_PROTOTYPES // NOLINTNEXTLINE -#include "mini_vulkan.h" +#include // -------------------------------------------------------------------------- // Vulkan API Definition @@ -77,27 +70,62 @@ extern "C" WEAK void *halide_vulkan_get_symbol(void *user_context, const char *n // Declare all the function pointers for the Vulkan API methods that will be resolved dynamically // clang-format off -#define VULKAN_FN(fn) WEAK PFN_##fn fn; -VULKAN_FN(vkCreateInstance) -VULKAN_FN(vkGetInstanceProcAddr) +#define VULKAN_FN(fn) WEAK PFN_##fn fn = nullptr; +#define HL_USE_VULKAN_LOADER_FNS +#define HL_USE_VULKAN_INSTANCE_FNS +#define HL_USE_VULKAN_DEVICE_FNS #include "vulkan_functions.h" 
+#undef HL_USE_VULKAN_DEVICE_FNS +#undef HL_USE_VULKAN_INSTANCE_FNS +#undef HL_USE_VULKAN_LOADER_FNS #undef VULKAN_FN // clang-format on -// Get the function pointers from the Vulkan loader to create an Instance (to find all available driver implementations) +// Get the function pointers to the Vulkan loader (to find all available instances) void WEAK vk_load_vulkan_loader_functions(void *user_context) { debug(user_context) << " vk_load_vulkan_loader_functions (user_context: " << user_context << ")\n"; #define VULKAN_FN(fn) fn = (PFN_##fn)halide_vulkan_get_symbol(user_context, #fn); - VULKAN_FN(vkCreateInstance) - VULKAN_FN(vkGetInstanceProcAddr) +#define HL_USE_VULKAN_LOADER_FNS +#include "vulkan_functions.h" +#undef HL_USE_VULKAN_LOADER_FNS #undef VULKAN_FN } -// Get the function pointers from the Vulkan instance for the resolved driver API methods. -void WEAK vk_load_vulkan_functions(void *user_context, VkInstance instance) { - debug(user_context) << " vk_load_vulkan_functions (user_context: " << user_context << ")\n"; +// Get the function pointers from the Vulkan loader for the resolved instance API methods. +void WEAK vk_load_vulkan_instance_functions(void *user_context, VkInstance instance) { + debug(user_context) << " vk_load_vulkan_instance_functions (user_context: " << user_context << ")\n"; #define VULKAN_FN(fn) fn = (PFN_##fn)vkGetInstanceProcAddr(instance, #fn); +#define HL_USE_VULKAN_INSTANCE_FNS +#include "vulkan_functions.h" +#undef HL_USE_VULKAN_INSTANCE_FNS +#undef VULKAN_FN +} + +// Reset the instance function pointers +void WEAK vk_unload_vulkan_instance_functions(void *user_context) { +#define VULKAN_FN(fn) fn = (PFN_##fn)(nullptr); +#define HL_USE_VULKAN_INSTANCE_FNS +#include "vulkan_functions.h" +#undef HL_USE_VULKAN_INSTANCE_FNS +#undef VULKAN_FN +} + +// Get the function pointers from the Vulkan instance for the resolved driver API methods. 
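// (Device-level commands can be resolved two ways: vkGetInstanceProcAddr() returns a
//  dispatch trampoline that works for any device created from the instance, while
//  vkGetDeviceProcAddr() returns the driver's own entry point with no per-call dispatch.
//  A minimal sketch of the difference, assuming valid `instance` and `device` handles:
//
//      PFN_vkQueueSubmit submit_via_instance =
//          (PFN_vkQueueSubmit)vkGetInstanceProcAddr(instance, "vkQueueSubmit");
//      PFN_vkQueueSubmit submit_via_device =
//          (PFN_vkQueueSubmit)vkGetDeviceProcAddr(device, "vkQueueSubmit");
//
//  The loader below takes the second path for every entry guarded by
//  HL_USE_VULKAN_DEVICE_FNS in vulkan_functions.h.)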
+void WEAK vk_load_vulkan_device_functions(void *user_context, VkDevice device) { + debug(user_context) << " vk_load_vulkan_device_functions (user_context: " << user_context << ")\n"; +#define VULKAN_FN(fn) fn = (PFN_##fn)vkGetDeviceProcAddr(device, #fn); +#define HL_USE_VULKAN_DEVICE_FNS +#include "vulkan_functions.h" +#undef HL_USE_VULKAN_DEVICE_FNS +#undef VULKAN_FN +} + +// Reset the device function pointers +void WEAK vk_unload_vulkan_device_functions(void *user_context) { +#define VULKAN_FN(fn) fn = (PFN_##fn)(nullptr); +#define HL_USE_VULKAN_DEVICE_FNS #include "vulkan_functions.h" +#undef HL_USE_VULKAN_DEVICE_FNS #undef VULKAN_FN } diff --git a/src/runtime/vulkan_internal.h b/src/runtime/vulkan_internal.h index 319d8caf63bf..aeef545385cc 100644 --- a/src/runtime/vulkan_internal.h +++ b/src/runtime/vulkan_internal.h @@ -24,6 +24,7 @@ namespace Vulkan { // Declarations class VulkanMemoryAllocator; struct VulkanShaderBinding; +struct VulkanCompiledShaderModule; struct VulkanCompilationCacheEntry; // -------------------------------------------------------------------------- @@ -42,7 +43,7 @@ VulkanMemoryAllocator *vk_create_memory_allocator(void *user_context, VkDevice d int vk_destroy_memory_allocator(void *user_context, VulkanMemoryAllocator *allocator); int vk_clear_device_buffer(void *user_context, VulkanMemoryAllocator *allocator, - VkCommandPool command_pool, + VkCommandBuffer command_buffer, VkQueue command_queue, VkBuffer device_buffer); // -------------------------------------------------------------------------- @@ -55,7 +56,6 @@ int vk_create_context( VkInstance *instance, VkDevice *device, VkPhysicalDevice *physical_device, - VkCommandPool *command_pool, VkQueue *queue, uint32_t *queue_family_index); int vk_destroy_context( @@ -64,7 +64,6 @@ int vk_destroy_context( VkInstance instance, VkDevice device, VkPhysicalDevice physical_device, - VkCommandPool command_pool, VkQueue queue); int vk_find_compute_capability(void *user_context, int *major, int *minor); @@ -101,13 +100,12 @@ bool vk_validate_required_extension_support(void *user_context, int vk_create_command_pool(void *user_context, VulkanMemoryAllocator *allocator, uint32_t queue_index, VkCommandPool *command_pool); int vk_destroy_command_pool(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool); -// Command pools are uint64_t and zero may be valid, so use this as a sentinel for invalid -const VkCommandPool VkInvalidCommandPool(uint64_t(-1)); - // -- Command Buffer int vk_create_command_buffer(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool pool, VkCommandBuffer *command_buffer); int vk_destroy_command_buffer(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool, VkCommandBuffer command_buffer); +struct ScopedVulkanCommandBufferAndPool; + int vk_fill_command_buffer_with_dispatch_call(void *user_context, VkDevice device, VkCommandBuffer command_buffer, @@ -217,12 +215,16 @@ int vk_destroy_compute_pipeline(void *user_context, VulkanMemoryAllocator *allocator, VkPipeline compute_pipeline); +// -- Kernel Module +VulkanCompilationCacheEntry *vk_compile_kernel_module(void *user_context, VulkanMemoryAllocator *allocator, + const char *ptr, int size); + // -- Shader Module VulkanShaderBinding *vk_decode_shader_bindings(void *user_context, VulkanMemoryAllocator *allocator, const uint32_t *module_ptr, uint32_t module_size); -VulkanCompilationCacheEntry *vk_compile_shader_module(void *user_context, VulkanMemoryAllocator *allocator, - const char *src, int 
size); +VulkanCompiledShaderModule *vk_compile_shader_module(void *user_context, VulkanMemoryAllocator *allocator, + const char *src, int size); int vk_destroy_shader_modules(void *user_context, VulkanMemoryAllocator *allocator); @@ -232,9 +234,12 @@ int vk_do_multidimensional_copy(void *user_context, VkCommandBuffer command_buff int d, bool from_host, bool to_host); // -------------------------------------------------------------------------- -// Errors +// Debug & Errors // -------------------------------------------------------------------------- +VkResult vk_create_debug_utils_messenger(void *user_context, VkInstance instance, VulkanMemoryAllocator *allocator, VkDebugUtilsMessengerEXT *messenger); +void vk_destroy_debug_utils_messenger(void *user_context, VkInstance instance, VulkanMemoryAllocator *allocator, VkDebugUtilsMessengerEXT messenger); + // Returns the corresponding string for a given vulkan error code const char *vk_get_error_name(VkResult error) { switch (error) { diff --git a/src/runtime/vulkan_memory.h b/src/runtime/vulkan_memory.h index 190d9e022afc..9b32de9a15c0 100644 --- a/src/runtime/vulkan_memory.h +++ b/src/runtime/vulkan_memory.h @@ -23,11 +23,11 @@ WEAK const VkAllocationCallbacks *custom_allocation_callbacks = nullptr; // nul // Runtime configuration parameters to adjust the behaviour of the block allocator struct VulkanMemoryConfig { - size_t maximum_pool_size = 0; //< Maximum number of bytes to allocate for the entire pool (including all blocks). Specified in bytes. Zero means no constraint - size_t minimum_block_size = 32 * 1024 * 1024; //< Default block size is 32MB - size_t maximum_block_size = 0; //< Specified in bytes. Zero means no constraint - size_t maximum_block_count = 0; //< Maximum number of blocks to allocate. Zero means no constraint - size_t nearest_multiple = 32; //< Always round up the requested region sizes to the given integer value. Zero means no constraint + size_t maximum_pool_size = 0; //< Maximum number of bytes to allocate for the entire pool (including all blocks). Specified in bytes. Zero means no constraint + size_t minimum_block_size = 4 * 1024; //< Default block size is 4KB + size_t maximum_block_size = 0; //< Specified in bytes. Zero means no constraint + size_t maximum_block_count = 0; //< Maximum number of blocks to allocate. Zero means no constraint + size_t nearest_multiple = 32; //< Always round up the requested region sizes to the given integer value. 
Zero means no constraint }; WEAK VulkanMemoryConfig memory_allocator_config; @@ -103,6 +103,7 @@ class VulkanMemoryAllocator { size_t regions_allocated() const; private: + static constexpr uint32_t invalid_memory_heap = uint32_t(-1); static constexpr uint32_t invalid_usage_flags = uint32_t(-1); static constexpr uint32_t invalid_memory_type = uint32_t(VK_MAX_MEMORY_TYPES); @@ -114,10 +115,24 @@ class VulkanMemoryAllocator { uint32_t select_memory_usage(void *user_context, MemoryProperties properties) const; - uint32_t select_memory_type(void *user_context, - VkPhysicalDevice physical_device, - MemoryProperties properties, - uint32_t required_flags) const; + uint32_t preferred_flags_for_memory_properties(void *user_context, MemoryProperties properties) const; + uint32_t required_flags_for_memory_properties(void *user_context, MemoryProperties properties) const; + + bool is_valid_memory_type_for_heap(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t heap_index) const; + + bool is_preferred_memory_type_for_flags(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t preferred_flags, + uint32_t required_flags) const; + + bool is_valid_memory_type_for_flags(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t required_flags) const; int lookup_requirements(void *user_context, size_t size, uint32_t usage_flags, VkMemoryRequirements *memory_requirements); @@ -554,13 +569,14 @@ int VulkanMemoryAllocator::lookup_requirements(void *user_context, size_t size, 0, nullptr}; // Create a buffer to determine alignment requirements - VkBuffer buffer = {0}; + VkBuffer buffer = VK_NULL_HANDLE; VkResult result = vkCreateBuffer(this->device, &create_info, this->alloc_callbacks, &buffer); if (result != VK_SUCCESS) { #if defined(HL_VK_DEBUG_MEM) debug(nullptr) << "VulkanMemoryAllocator: Failed to create buffer to find requirements!\n\t" << "vkCreateBuffer returned: " << vk_get_error_name(result) << "\n"; #endif + error(user_context) << "VulkanRegionAllocator: Failed to create buffer to gather memory requirements!\n"; return halide_error_code_device_malloc_failed; } @@ -573,7 +589,7 @@ int VulkanMemoryAllocator::conform_block_request(void *instance_ptr, MemoryReque VulkanMemoryAllocator *instance = reinterpret_cast(instance_ptr); if (instance == nullptr) { - return halide_error_code_internal_error; + return halide_error_code_success; } void *user_context = instance->owner_context; @@ -614,7 +630,7 @@ int VulkanMemoryAllocator::conform_block_request(void *instance_ptr, MemoryReque int VulkanMemoryAllocator::allocate_block(void *instance_ptr, MemoryBlock *block) { VulkanMemoryAllocator *instance = reinterpret_cast(instance_ptr); if (instance == nullptr) { - return halide_error_code_internal_error; + return halide_error_code_success; } void *user_context = instance->owner_context; @@ -639,32 +655,114 @@ int VulkanMemoryAllocator::allocate_block(void *instance_ptr, MemoryBlock *block << "visibility=" << halide_memory_visibility_name(block->properties.visibility) << ")\n"; #endif - // Find an appropriate memory type given the flags - uint32_t memory_type = instance->select_memory_type(user_context, instance->physical_device, block->properties, 0); - if (memory_type == invalid_memory_type) { - error(user_context) << "VulkanMemoryAllocator: Unable to find appropriate memory type for device!\n"; - return halide_error_code_generic_error; 
- } - - // Allocate memory - VkMemoryAllocateInfo alloc_info = { - VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // struct type - nullptr, // struct extending this - block->size, // size of allocation in bytes - memory_type // memory type index from physical device - }; - VkDeviceMemory *device_memory = (VkDeviceMemory *)vk_host_malloc(nullptr, sizeof(VkDeviceMemory), 0, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT, instance->alloc_callbacks); if (device_memory == nullptr) { - debug(nullptr) << "VulkanBlockAllocator: Unable to allocate block! Failed to allocate device memory handle!\n"; + error(user_context) << "VulkanBlockAllocator: Unable to allocate block! Failed to allocate device memory handle!\n"; return halide_error_code_out_of_memory; } - VkResult result = vkAllocateMemory(instance->device, &alloc_info, instance->alloc_callbacks, device_memory); - if (result != VK_SUCCESS) { - debug(nullptr) << "VulkanMemoryAllocator: Allocation failed! vkAllocateMemory returned: " << vk_get_error_name(result) << "\n"; + VkPhysicalDeviceMemoryProperties memory_properties; + vkGetPhysicalDeviceMemoryProperties(instance->physical_device, &memory_properties); + + // Attempt to allocate device memory with the requested flags, trying each type until successful + uint32_t selected_type = invalid_memory_type; + uint32_t preferred_flags = instance->preferred_flags_for_memory_properties(user_context, block->properties); + uint32_t required_flags = instance->required_flags_for_memory_properties(user_context, block->properties); + + // Try preferred flags first + for (uint32_t type_index = 0; type_index < memory_properties.memoryTypeCount; type_index++) { + if (!instance->is_preferred_memory_type_for_flags(user_context, &memory_properties, type_index, preferred_flags, required_flags)) { + continue; + } + + // Allocate memory + VkMemoryAllocateInfo alloc_info = { + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // struct type + nullptr, // struct extending this + block->size, // size of allocation in bytes + type_index // memory type index from physical device + }; + + VkResult result = vkAllocateMemory(instance->device, &alloc_info, instance->alloc_callbacks, device_memory); + if (result == VK_SUCCESS) { + selected_type = type_index; + break; + } + +#if defined(HL_VK_DEBUG_MEM) + debug(user_context) << "VulkanMemoryAllocator: Allocation request for preferred flags was not successful (\n" + << "\tblock=" << (void *)(block) << "\n" + << "\ttype_index=" << type_index << "\n" + << "\tsize=" << (uint64_t)block->size << "\n" + << "\tdedicated=" << (block->dedicated ? 
"true" : "false") << "\n" + << "\tusage=" << halide_memory_usage_name(block->properties.usage) << "\n" + << "\tcaching=" << halide_memory_caching_name(block->properties.caching) << "\n" + << "\tvisibility=" << halide_memory_visibility_name(block->properties.visibility) << "\n" + << "\tblocks_allocated=" << (uint64_t)instance->blocks_allocated() << "\n" + << "\tbytes_allocated_for_blocks=" << (uint64_t)instance->bytes_allocated_for_blocks() << "\n" + << "\tregions_allocated=" << (uint64_t)instance->regions_allocated() << "\n" + << "\tbytes_allocated_for_regions=" << (uint64_t)instance->bytes_allocated_for_regions() << "\n)\n" + << "vkAllocateMemory returned: " + << vk_get_error_name(result) << "\n"; +#endif + } + + // try any valid type on any valid pool + if (selected_type == invalid_memory_type) { + for (uint32_t type_index = 0; type_index < memory_properties.memoryTypeCount; type_index++) { + if (!instance->is_valid_memory_type_for_flags(user_context, &memory_properties, type_index, required_flags)) { + continue; + } + + // Allocate memory + VkMemoryAllocateInfo alloc_info = { + VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // struct type + nullptr, // struct extending this + block->size, // size of allocation in bytes + type_index // memory type index from physical device + }; + + VkResult result = vkAllocateMemory(instance->device, &alloc_info, instance->alloc_callbacks, device_memory); + if (result == VK_SUCCESS) { + selected_type = type_index; + break; + } +#if defined(HL_VK_DEBUG_MEM) + debug(user_context) << "VulkanMemoryAllocator: Allocation request for valid flags was not successful (\n" + << "\tblock=" << (void *)(block) << "\n" + << "\ttype_index=" << type_index << "\n" + << "\tsize=" << (uint64_t)block->size << "\n" + << "\tdedicated=" << (block->dedicated ? "true" : "false") << "\n" + << "\tusage=" << halide_memory_usage_name(block->properties.usage) << "\n" + << "\tcaching=" << halide_memory_caching_name(block->properties.caching) << "\n" + << "\tvisibility=" << halide_memory_visibility_name(block->properties.visibility) << "\n" + << "\tblocks_allocated=" << (uint64_t)instance->blocks_allocated() << "\n" + << "\tbytes_allocated_for_blocks=" << (uint64_t)instance->bytes_allocated_for_blocks() << "\n" + << "\tregions_allocated=" << (uint64_t)instance->regions_allocated() << "\n" + << "\tbytes_allocated_for_regions=" << (uint64_t)instance->bytes_allocated_for_regions() << "\n)\n" + << "vkAllocateMemory returned: " + << vk_get_error_name(result) << "\n"; +#endif + } + } + + if (selected_type == invalid_memory_type) { + vk_host_free(nullptr, device_memory, instance->alloc_callbacks); + device_memory = VK_NULL_HANDLE; + error(user_context) << "VulkanMemoryAllocator: Allocation failed for block (\n" + << "\tblock=" << (void *)(block) << "\n" + << "\tsize=" << (uint64_t)block->size << "\n" + << "\tdedicated=" << (block->dedicated ? 
"true" : "false") << "\n" + << "\tusage=" << halide_memory_usage_name(block->properties.usage) << "\n" + << "\tcaching=" << halide_memory_caching_name(block->properties.caching) << "\n" + << "\tvisibility=" << halide_memory_visibility_name(block->properties.visibility) << "\n" + << "\tblocks_allocated=" << (uint64_t)instance->blocks_allocated() << "\n" + << "\tbytes_allocated_for_blocks=" << (uint64_t)instance->bytes_allocated_for_blocks() << "\n" + << "\tregions_allocated=" << (uint64_t)instance->regions_allocated() << "\n" + << "\tbytes_allocated_for_regions=" << (uint64_t)instance->bytes_allocated_for_regions() << "\n)\n"; return halide_error_code_device_malloc_failed; } + #ifdef DEBUG_RUNTIME debug(nullptr) << "vkAllocateMemory: Allocated memory for device region (" << (uint64_t)block->size << " bytes) ...\n"; #endif @@ -678,7 +776,7 @@ int VulkanMemoryAllocator::allocate_block(void *instance_ptr, MemoryBlock *block int VulkanMemoryAllocator::deallocate_block(void *instance_ptr, MemoryBlock *block) { VulkanMemoryAllocator *instance = reinterpret_cast(instance_ptr); if (instance == nullptr) { - return halide_error_code_internal_error; + return halide_error_code_success; } void *user_context = instance->owner_context; @@ -707,9 +805,9 @@ int VulkanMemoryAllocator::deallocate_block(void *instance_ptr, MemoryBlock *blo << "visibility=" << halide_memory_visibility_name(block->properties.visibility) << ")\n"; #endif + // Nothing to do if device memory was already freed if (block->handle == nullptr) { - error(user_context) << "VulkanBlockAllocator: Unable to deallocate block! Invalid handle!\n"; - return halide_error_code_internal_error; + return halide_error_code_success; } VkDeviceMemory *device_memory = reinterpret_cast(block->handle); @@ -751,49 +849,67 @@ size_t VulkanMemoryAllocator::bytes_allocated_for_blocks() const { return block_byte_count; } -uint32_t VulkanMemoryAllocator::select_memory_type(void *user_context, - VkPhysicalDevice physical_device, - MemoryProperties properties, - uint32_t required_flags) const { +uint32_t VulkanMemoryAllocator::required_flags_for_memory_properties( + void *user_context, MemoryProperties properties) const { - uint32_t want_flags = 0; //< preferred memory flags for requested access type - uint32_t need_flags = 0; //< must have in order to enable requested access + uint32_t required_flags = 0; //< must have in order to enable requested access switch (properties.visibility) { - case MemoryVisibility::HostOnly: - want_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - break; case MemoryVisibility::DeviceOnly: - need_flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + required_flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; break; case MemoryVisibility::DeviceToHost: - need_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; - want_flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + required_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; break; case MemoryVisibility::HostToDevice: - need_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + required_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; break; case MemoryVisibility::DefaultVisibility: + case MemoryVisibility::HostOnly: + break; case MemoryVisibility::InvalidVisibility: - default: - error(nullptr) << "VulkanMemoryAllocator: Unable to convert type! Invalid memory visibility request!\n\t" - << "visibility=" << halide_memory_visibility_name(properties.visibility) << "\n"; + error(user_context) << "VulkanMemoryAllocator: Unable to convert type! 
Invalid memory visibility request!\n\t" + << "caching=" << halide_memory_caching_name(properties.caching) << "\n"; return invalid_memory_type; }; + return required_flags; +} +uint32_t VulkanMemoryAllocator::preferred_flags_for_memory_properties( + void *user_context, MemoryProperties properties) const { + + uint32_t preferred_flags = 0; //< preferred memory flags for requested access type + switch (properties.visibility) { + case MemoryVisibility::HostOnly: + preferred_flags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT; + break; + case MemoryVisibility::DeviceToHost: + preferred_flags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT; + break; + case MemoryVisibility::HostToDevice: + case MemoryVisibility::DefaultVisibility: + case MemoryVisibility::DeviceOnly: + break; + case MemoryVisibility::InvalidVisibility: + error(user_context) << "VulkanMemoryAllocator: Unable to convert type! Invalid memory visibility request!\n\t" + << "caching=" << halide_memory_caching_name(properties.caching) << "\n"; + return invalid_memory_type; + }; + + uint32_t required_flags = required_flags_for_memory_properties(user_context, properties); switch (properties.caching) { case MemoryCaching::CachedCoherent: - if (need_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { - want_flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + if (required_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { + preferred_flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; } break; case MemoryCaching::UncachedCoherent: - if (need_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { - want_flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; + if (required_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { + preferred_flags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; } break; case MemoryCaching::Cached: - if (need_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { - want_flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; + if (required_flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) { + preferred_flags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT; } break; case MemoryCaching::Uncached: @@ -805,46 +921,61 @@ uint32_t VulkanMemoryAllocator::select_memory_type(void *user_context, << "caching=" << halide_memory_caching_name(properties.caching) << "\n"; return invalid_memory_type; }; + return preferred_flags; +} + +bool VulkanMemoryAllocator::is_valid_memory_type_for_heap(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t heap_index) const { - VkPhysicalDeviceMemoryProperties device_memory_properties; - vkGetPhysicalDeviceMemoryProperties(physical_device, &device_memory_properties); + if (memory_type_index < memory_properties->memoryTypeCount) { + if (memory_properties->memoryTypes[memory_type_index].heapIndex == heap_index) { + return true; + } + } + return false; +} - uint32_t result = invalid_memory_type; - for (uint32_t i = 0; i < device_memory_properties.memoryTypeCount; ++i) { +bool VulkanMemoryAllocator::is_valid_memory_type_for_flags(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t required_flags) const { - // if required flags are given, see if the memory type matches the requirement + if (memory_type_index < memory_properties->memoryTypeCount) { + const VkMemoryPropertyFlags property_flags = memory_properties->memoryTypes[memory_type_index].propertyFlags; if (required_flags) { - if (((required_flags >> i) & 1) == 0) { - continue; + if ((property_flags & required_flags) != required_flags) { + 
return false; } } + return true; + } + return false; +} - const VkMemoryPropertyFlags properties = device_memory_properties.memoryTypes[i].propertyFlags; - if (need_flags) { - if ((properties & need_flags) != need_flags) { - continue; +bool VulkanMemoryAllocator::is_preferred_memory_type_for_flags(void *user_context, + VkPhysicalDeviceMemoryProperties *memory_properties, + uint32_t memory_type_index, + uint32_t preferred_flags, + uint32_t required_flags) const { + + if (memory_type_index < memory_properties->memoryTypeCount) { + const VkMemoryPropertyFlags property_flags = memory_properties->memoryTypes[memory_type_index].propertyFlags; + if (required_flags) { + if ((property_flags & required_flags) != required_flags) { + return false; } } - if (want_flags) { - if ((properties & want_flags) != want_flags) { - continue; + if (preferred_flags) { + if ((property_flags & preferred_flags) != preferred_flags) { + return false; } } - - result = i; - break; + return true; } - - if (result == invalid_memory_type) { - error(user_context) << "VulkanBlockAllocator: Failed to find appropriate memory type for given properties:\n\t" - << "usage=" << halide_memory_usage_name(properties.usage) << " " - << "caching=" << halide_memory_caching_name(properties.caching) << " " - << "visibility=" << halide_memory_visibility_name(properties.visibility) << "\n"; - return invalid_memory_type; - } - - return result; + return false; } // -- @@ -913,7 +1044,7 @@ int VulkanMemoryAllocator::conform_region_request(void *instance_ptr, MemoryRequ VulkanMemoryAllocator *instance = reinterpret_cast(instance_ptr); if (instance == nullptr) { - return halide_error_code_internal_error; + return halide_error_code_success; } void *user_context = instance->owner_context; @@ -945,7 +1076,7 @@ int VulkanMemoryAllocator::allocate_region(void *instance_ptr, MemoryRegion *reg VulkanMemoryAllocator *instance = reinterpret_cast(instance_ptr); if (instance == nullptr) { - return halide_error_code_internal_error; + return halide_error_code_success; } void *user_context = instance->owner_context; @@ -994,6 +1125,13 @@ int VulkanMemoryAllocator::allocate_region(void *instance_ptr, MemoryRegion *reg VkResult result = vkCreateBuffer(instance->device, &create_info, instance->alloc_callbacks, buffer); if (result != VK_SUCCESS) { + // Allocation failed ... collect unused regions and try again ... 
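        // (The same reclamation is available to application code through the public
        //  runtime entry point, e.g. to trim the pool between large pipeline invocations:
        //
        //      halide_vulkan_release_unused_device_allocations(nullptr);
        //
        //  which acquires the context and calls allocator->collect(), the same path
        //  retried here when the initial vkCreateBuffer() fails.)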
+        instance->collect(user_context);
+        result = vkCreateBuffer(instance->device, &create_info, instance->alloc_callbacks, buffer);
+    }
+    if (result != VK_SUCCESS) {
+        vk_host_free(nullptr, buffer, instance->alloc_callbacks);
+        buffer = nullptr;
         error(user_context) << "VulkanRegionAllocator: Failed to create buffer!\n\t"
                             << "vkCreateBuffer returned: " << vk_get_error_name(result) << "\n";
         return halide_error_code_device_malloc_failed;
@@ -1069,7 +1207,7 @@ int VulkanMemoryAllocator::allocate_region(void *instance_ptr, MemoryRegion *reg
 int VulkanMemoryAllocator::deallocate_region(void *instance_ptr, MemoryRegion *region) {
     VulkanMemoryAllocator *instance = reinterpret_cast<VulkanMemoryAllocator *>(instance_ptr);
     if (instance == nullptr) {
-        return halide_error_code_internal_error;
+        return halide_error_code_success;
     }

     void *user_context = instance->owner_context;
@@ -1114,25 +1252,26 @@ int VulkanMemoryAllocator::deallocate_region(void *instance_ptr, MemoryRegion *r
 #ifdef DEBUG_RUNTIME
     debug(nullptr) << "vkDestroyBuffer: Destroyed buffer for device region (" << (uint64_t)region->size << " bytes) ...\n";
 #endif
+    halide_error_code_t error_code = halide_error_code_success;

     region->handle = nullptr;
     if (instance->region_count > 0) {
         instance->region_count--;
     } else {
-        error(nullptr) << "VulkanRegionAllocator: Region counter invalid ... reseting to zero!\n";
+        error(user_context) << "VulkanRegionAllocator: Region counter invalid ... resetting to zero!\n";
         instance->region_count = 0;
-        return halide_error_code_internal_error;
+        error_code = halide_error_code_internal_error;
     }

     if (int64_t(instance->region_byte_count) - int64_t(region->size) >= 0) {
         instance->region_byte_count -= region->size;
     } else {
-        error(nullptr) << "VulkanRegionAllocator: Region byte counter invalid ... reseting to zero!\n";
+        error(user_context) << "VulkanRegionAllocator: Region byte counter invalid ... resetting to zero!\n";
         instance->region_byte_count = 0;
-        return halide_error_code_internal_error;
+        error_code = halide_error_code_internal_error;
     }

     vk_host_free(nullptr, buffer, instance->alloc_callbacks);
     buffer = nullptr;
-    return halide_error_code_success;
+    return error_code;
 }
@@ -1260,6 +1399,7 @@ VulkanMemoryAllocator *vk_create_memory_allocator(void *user_context,

 int vk_destroy_memory_allocator(void *user_context, VulkanMemoryAllocator *allocator) {
     if (allocator != nullptr) {
+        allocator->collect(user_context);
         VulkanMemoryAllocator::destroy(user_context, allocator);
         allocator = nullptr;
     }
@@ -1270,7 +1410,7 @@ int vk_destroy_memory_allocator(void *user_context, VulkanMemoryAllocator *alloc

 int vk_clear_device_buffer(void *user_context,
                            VulkanMemoryAllocator *allocator,
-                           VkCommandPool command_pool,
+                           VkCommandBuffer command_buffer,
                            VkQueue command_queue,
                            VkBuffer device_buffer) {
@@ -1278,19 +1418,11 @@ int vk_clear_device_buffer(void *user_context,
     debug(user_context)
         << " vk_clear_device_buffer (user_context: " << user_context << ", "
         << "allocator: " << (void *)allocator << ", "
-        << "command_pool: " << (void *)command_pool << ", "
+        << "command_buffer: " << (void *)command_buffer << ", "
         << "command_queue: " << (void *)command_queue << ", "
         << "device_buffer: " << (void *)device_buffer << ")\n";
 #endif

-    // create a command buffer
-    VkCommandBuffer command_buffer;
-    int error_code = vk_create_command_buffer(user_context, allocator, command_pool, &command_buffer);
-    if (error_code != halide_error_code_success) {
-        error(user_context) << "Vulkan: Failed to create command buffer!\n";
-        return error_code;
-    }
-
     // begin the command buffer
     VkCommandBufferBeginInfo command_buffer_begin_info =
         {
@@ -1330,7 +1462,7 @@ int vk_clear_device_buffer(void *user_context,
             nullptr  // the semaphores to signal
         };

-    result = vkQueueSubmit(command_queue, 1, &submit_info, 0);
+    result = vkQueueSubmit(command_queue, 1, &submit_info, VK_NULL_HANDLE);
     if (result != VK_SUCCESS) {
         error(user_context) << "Vulkan: vkQueueSubmit returned " << vk_get_error_name(result) << "\n";
         return halide_error_code_generic_error;
@@ -1343,12 +1475,6 @@ int vk_clear_device_buffer(void *user_context,
         return halide_error_code_generic_error;
     }

-    error_code = vk_destroy_command_buffer(user_context, allocator, command_pool, command_buffer);
-    if (error_code != halide_error_code_success) {
-        error(user_context) << "Vulkan: Failed to destroy command buffer!\n";
-        return error_code;
-    }
-
     return halide_error_code_success;
 }

diff --git a/src/runtime/vulkan_resources.h b/src/runtime/vulkan_resources.h
index f28548700457..1e696110b5c9 100644
--- a/src/runtime/vulkan_resources.h
+++ b/src/runtime/vulkan_resources.h
@@ -43,9 +43,9 @@ struct VulkanSharedMemoryAllocation {
 struct VulkanShaderBinding {
     const char *entry_point_name = nullptr;
     VulkanDispatchData dispatch_data = {};
-    VkDescriptorPool descriptor_pool = {0};
-    VkDescriptorSet descriptor_set = {0};
-    VkPipeline compute_pipeline = {0};
+    VkDescriptorPool descriptor_pool = VK_NULL_HANDLE;
+    VkDescriptorSet descriptor_set = VK_NULL_HANDLE;
+    VkPipeline compute_pipeline = VK_NULL_HANDLE;
     uint32_t uniform_buffer_count = 0;
     uint32_t storage_buffer_count = 0;
     uint32_t specialization_constants_count = 0;
@@ -56,15 +56,22 @@ struct VulkanShaderBinding {
     MemoryRegion *args_region = nullptr;
 };

-// Compilation cache for compiled shader modules
-struct VulkanCompilationCacheEntry {
-    VkShaderModule shader_module = {0};
+// Compiled shader module and associated bindings
+struct VulkanCompiledShaderModule {
+    VkShaderModule shader_module = VK_NULL_HANDLE;
     VkDescriptorSetLayout *descriptor_set_layouts = nullptr;
-    VkPipelineLayout pipeline_layout = {0};
+    VkPipelineLayout pipeline_layout = VK_NULL_HANDLE;
     uint32_t shader_count = 0;
     VulkanShaderBinding *shader_bindings = nullptr;
 };

+// Compilation cache for compiled shader modules
+struct VulkanCompilationCacheEntry {
+    VulkanMemoryAllocator *allocator = nullptr;
+    VulkanCompiledShaderModule **compiled_modules = nullptr;
+    uint32_t module_count = 0;
+};
+
 WEAK Halide::Internal::GPUCompilationCache<VkDevice, VulkanCompilationCacheEntry *> compilation_cache;

 // --------------------------------------------------------------------------
@@ -113,10 +120,6 @@ int vk_destroy_command_pool(void *user_context, VulkanMemoryAllocator *allocator
         error(user_context) << "Vulkan: Failed to destroy command pool ... invalid allocator pointer!\n";
         return halide_error_code_generic_error;
     }
-    if (command_pool == VkInvalidCommandPool) {
-        debug(user_context) << "Vulkan: Command pool already destroyed.\n";
-        return halide_error_code_success;
-    }
     vkResetCommandPool(allocator->current_device(), command_pool, VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT);
     vkDestroyCommandPool(allocator->current_device(), command_pool, allocator->callbacks());
     return halide_error_code_success;
 }
@@ -170,6 +173,35 @@ int vk_destroy_command_buffer(void *user_context, VulkanMemoryAllocat
     return halide_error_code_success;
 }

+// Struct for handling destruction of a transient command buffer ... gets destroyed when object goes out of scope
+struct ScopedVulkanCommandBufferAndPool {
+    void *user_context = nullptr;
+    VulkanMemoryAllocator *allocator = nullptr;
+    VkCommandPool command_pool = VK_NULL_HANDLE;
+    VkCommandBuffer command_buffer = VK_NULL_HANDLE;
+    int error_code = halide_error_code_success;
+
+    ScopedVulkanCommandBufferAndPool(void *uc, VulkanMemoryAllocator *vma, uint32_t queue_family_index)
+        : user_context(uc), allocator(vma) {
+        error_code = vk_create_command_pool(user_context, allocator, queue_family_index, &command_pool);
+        if (error_code == halide_error_code_success) {
+            error_code = vk_create_command_buffer(user_context, allocator, command_pool, &command_buffer);
+        }
+    }
+    ~ScopedVulkanCommandBufferAndPool() {
+        if ((allocator != nullptr) && (command_pool != VK_NULL_HANDLE)) {
+            if (command_buffer != VK_NULL_HANDLE) {
+                vk_destroy_command_buffer(user_context, allocator, command_pool, command_buffer);
+            }
+            vk_destroy_command_pool(user_context, allocator, command_pool);
+        }
+        user_context = nullptr;
+        allocator = nullptr;
+        command_pool = VK_NULL_HANDLE;
+        command_buffer = VK_NULL_HANDLE;
+    }
+};
+
 int vk_fill_command_buffer_with_dispatch_call(void *user_context,
                                               VkDevice device,
                                               VkCommandBuffer command_buffer,
@@ -238,7 +270,7 @@ int vk_submit_command_buffer(void *user_context, VkQueue queue, VkCommandBuffer
         nullptr  // the semaphores to signal
     };

-    VkResult result = vkQueueSubmit(queue, 1, &submit_info, 0);
+    VkResult result = vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
     if (result != VK_SUCCESS) {
         error(user_context) << "Vulkan: vkQueueSubmit returned " << vk_get_error_name(result) << "\n";
         return halide_error_code_generic_error;
@@ -840,11 +872,11 @@ int vk_create_compute_pipeline(void *user_context,
             specialization_info,  // pointer to VkSpecializationInfo struct
         },
         pipeline_layout,  // pipeline layout
-        0,                // base pipeline handle for derived pipeline
+        VK_NULL_HANDLE,   // base pipeline handle for derived pipeline
        0
// base pipeline index for derived pipeline }; - VkResult result = vkCreateComputePipelines(allocator->current_device(), 0, 1, &compute_pipeline_info, allocator->callbacks(), compute_pipeline); + VkResult result = vkCreateComputePipelines(allocator->current_device(), VK_NULL_HANDLE, 1, &compute_pipeline_info, allocator->callbacks(), compute_pipeline); if (result != VK_SUCCESS) { error(user_context) << "Vulkan: Failed to create compute pipeline! vkCreateComputePipelines returned " << vk_get_error_name(result) << "\n"; return halide_error_code_generic_error; @@ -1021,7 +1053,7 @@ int vk_setup_compute_pipeline(void *user_context, error(user_context) << "Vulkan: Failed to destroy compute pipeline!\n"; return halide_error_code_generic_error; } - shader_bindings->compute_pipeline = {0}; + shader_bindings->compute_pipeline = VK_NULL_HANDLE; } int error_code = vk_create_compute_pipeline(user_context, allocator, entry_point_name, shader_module, pipeline_layout, &specialization_info, &(shader_bindings->compute_pipeline)); @@ -1033,7 +1065,7 @@ int vk_setup_compute_pipeline(void *user_context, } else { // Construct and re-use the fixed pipeline - if (shader_bindings->compute_pipeline == 0) { + if (shader_bindings->compute_pipeline == VK_NULL_HANDLE) { int error_code = vk_create_compute_pipeline(user_context, allocator, entry_point_name, shader_module, pipeline_layout, nullptr, &(shader_bindings->compute_pipeline)); if (error_code != halide_error_code_success) { error(user_context) << "Vulkan: Failed to create compute pipeline!\n"; @@ -1339,8 +1371,123 @@ int vk_validate_shader_for_device(void *user_context, VulkanMemoryAllocator *all return halide_error_code_success; } -VulkanCompilationCacheEntry *vk_compile_shader_module(void *user_context, VulkanMemoryAllocator *allocator, +VulkanCompilationCacheEntry *vk_compile_kernel_module(void *user_context, VulkanMemoryAllocator *allocator, const char *ptr, int size) { +#ifdef DEBUG_RUNTIME + debug(user_context) + << " vk_compile_kernel_module (user_context: " << user_context << ", " + << "allocator: " << (void *)allocator << ", " + << "device: " << (void *)allocator->current_device() << ", " + << "module: " << (void *)ptr << ", " + << "size: " << size << ")\n"; + + uint64_t t_before = halide_current_time_ns(user_context); +#endif + + if (allocator == nullptr) { + debug(user_context) << "Vulkan: Failed to compile kernel module ... invalid allocator pointer!\n"; + return nullptr; + } + + if ((ptr == nullptr) || (size <= 0)) { + debug(user_context) << "Vulkan: Failed to compile kernel module ... invalid module!\n"; + return nullptr; + } + + VkSystemAllocationScope alloc_scope = VkSystemAllocationScope::VK_SYSTEM_ALLOCATION_SCOPE_OBJECT; + VulkanCompilationCacheEntry *cache_entry = (VulkanCompilationCacheEntry *)vk_host_malloc(user_context, sizeof(VulkanCompilationCacheEntry), 0, alloc_scope, allocator->callbacks()); + if (cache_entry == nullptr) { + debug(user_context) << "Vulkan: Failed to allocate compilation cache entry! 
Out of memory!\n"; + return nullptr; + } + memset(cache_entry, 0, sizeof(VulkanCompilationCacheEntry)); + + // Decode the header and the kernel modules + uint32_t word_offset = 0; + const uint32_t *module_header = (const uint32_t *)(ptr); + if ((size_t)size < sizeof(uint32_t)) { + debug(user_context) << "Vulkan: Code module size is invalid!\n"; + return nullptr; + } + + // Extract the number of kernels from the module header + uint32_t kernel_count = module_header[word_offset++]; + debug(user_context) << " kernel_count=" << kernel_count << "\n"; + + // Allocate enough space to store the compiled modules + cache_entry->compiled_modules = (VulkanCompiledShaderModule **)vk_host_malloc(user_context, sizeof(VulkanCompiledShaderModule *) * kernel_count, 0, alloc_scope, allocator->callbacks()); + if (cache_entry->compiled_modules == nullptr) { + debug(user_context) << "Vulkan: Failed to allocate host memory!\n"; + return nullptr; + } + cache_entry->module_count = kernel_count; + cache_entry->allocator = allocator; + + // Allocate a temp buffer to decode the binary sizes of each "SPIR-V Module" + uint32_t *binary_sizes = (uint32_t *)vk_system_malloc(user_context, sizeof(uint32_t) * kernel_count); + if (binary_sizes == nullptr) { + debug(user_context) << "Vulkan: Failed to allocate system memory!\n"; + return nullptr; + } + + // Extract the size of each "SPIR-V Module" for each kernel + size_t byte_offset = 0; + for (uint32_t i = 0; (i < kernel_count) && (byte_offset < (size_t)size); ++i) { + // Extract binary size + binary_sizes[i] = module_header[word_offset++]; + + // Skip past the kernel name + uint32_t kernel_name_entry_size = module_header[word_offset++]; + const char *kernel_name = (const char *)(module_header + word_offset); + word_offset += kernel_name_entry_size; + + // Compute byte offset for loop range check + byte_offset = (word_offset * sizeof(uint32_t)); + debug(user_context) << " kernel[" << i << "] name: " << kernel_name << " binary_size: " << binary_sizes[i] << " bytes\n"; + } + + // Compile each "SPIR-V Module" for each kernel + halide_error_code_t error_code = halide_error_code_success; + for (uint32_t i = 0; (i < kernel_count) && (byte_offset < (size_t)size); ++i) { + + // Skip the header and determine the start address of the "SPIR-V Module" + const uint32_t *spirv_ptr = (const uint32_t *)(ptr + byte_offset); + size_t spirv_size = binary_sizes[i]; + + debug(user_context) << " spirv_size[" << i << "] = " << spirv_size << " bytes\n"; + debug(user_context) << " spirv_ptr[" << i << "] = " << spirv_ptr << "\n"; + + // Compile the "SPIR-V Module" for the kernel + cache_entry->compiled_modules[i] = vk_compile_shader_module(user_context, allocator, (const char *)spirv_ptr, (int)spirv_size); + if (cache_entry->compiled_modules[i] == nullptr) { + debug(user_context) << "Vulkan: Failed to compile shader module!\n"; + error_code = halide_error_code_generic_error; + } + + // Skip to the next "SPIR-V Module" + byte_offset += binary_sizes[i]; + } + + // Free temp buffer + vk_system_free(user_context, binary_sizes); + + // Cleanup if compile failed + if (error_code != halide_error_code_success) { + vk_host_free(user_context, cache_entry->compiled_modules, allocator->callbacks()); + vk_host_free(user_context, cache_entry, allocator->callbacks()); + cache_entry = nullptr; + } + +#ifdef DEBUG_RUNTIME + uint64_t t_after = halide_current_time_ns(user_context); + debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n"; +#endif + + return cache_entry; +} + 
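+// The packed code module decoded by vk_compile_kernel_module() above is assumed
+// (based on the decoding loop, not on the encoder) to be laid out as 32-bit words:
+//   word 0:        kernel_count
+//   per kernel:    binary_size in bytes of that kernel's SPIR-V module, then
+//                  kernel_name_entry_size in words, then the kernel name padded
+//                  out to a whole number of words
+//   remainder:     the kernel_count SPIR-V binaries, stored back to back
+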
+VulkanCompiledShaderModule *vk_compile_shader_module(void *user_context, VulkanMemoryAllocator *allocator, + const char *ptr, int size) { #ifdef DEBUG_RUNTIME debug(user_context) << " vk_compile_shader_module (user_context: " << user_context << ", " @@ -1394,12 +1541,12 @@ VulkanCompilationCacheEntry *vk_compile_shader_module(void *user_context, Vulkan }; VkSystemAllocationScope alloc_scope = VkSystemAllocationScope::VK_SYSTEM_ALLOCATION_SCOPE_OBJECT; - VulkanCompilationCacheEntry *cache_entry = (VulkanCompilationCacheEntry *)vk_host_malloc(user_context, sizeof(VulkanCompilationCacheEntry), 0, alloc_scope, allocator->callbacks()); - if (cache_entry == nullptr) { + VulkanCompiledShaderModule *compiled_module = (VulkanCompiledShaderModule *)vk_host_malloc(user_context, sizeof(VulkanCompiledShaderModule), 0, alloc_scope, allocator->callbacks()); + if (compiled_module == nullptr) { error(user_context) << "Vulkan: Failed to allocate compilation cache entry! Out of memory!\n"; return nullptr; } - memset(cache_entry, 0, sizeof(VulkanCompilationCacheEntry)); + memset(compiled_module, 0, sizeof(VulkanCompiledShaderModule)); // decode the entry point data and extract the shader bindings VulkanShaderBinding *decoded_bindings = vk_decode_shader_bindings(user_context, allocator, module_ptr, module_size); @@ -1411,31 +1558,31 @@ VulkanCompilationCacheEntry *vk_compile_shader_module(void *user_context, Vulkan // validate that the compiled shader can be executed by the device with the requested resources int valid_status = vk_validate_shader_for_device(user_context, allocator, decoded_bindings, shader_count); if (valid_status != halide_error_code_success) { - vk_host_free(user_context, cache_entry->shader_bindings, allocator->callbacks()); - vk_host_free(user_context, cache_entry, allocator->callbacks()); + vk_host_free(user_context, compiled_module->shader_bindings, allocator->callbacks()); + vk_host_free(user_context, compiled_module, allocator->callbacks()); return nullptr; } // save the shader bindings in the cache entry - cache_entry->shader_bindings = decoded_bindings; - cache_entry->shader_count = shader_count; + compiled_module->shader_bindings = decoded_bindings; + compiled_module->shader_count = shader_count; - VkResult result = vkCreateShaderModule(allocator->current_device(), &shader_info, allocator->callbacks(), &cache_entry->shader_module); + VkResult result = vkCreateShaderModule(allocator->current_device(), &shader_info, allocator->callbacks(), &compiled_module->shader_module); if ((result != VK_SUCCESS)) { error(user_context) << "Vulkan: vkCreateShaderModule Failed! 
Error returned: " << vk_get_error_name(result) << "\n"; - vk_host_free(user_context, cache_entry->shader_bindings, allocator->callbacks()); - vk_host_free(user_context, cache_entry, allocator->callbacks()); + vk_host_free(user_context, compiled_module->shader_bindings, allocator->callbacks()); + vk_host_free(user_context, compiled_module, allocator->callbacks()); return nullptr; } // allocate an array for storing the descriptor set layouts - if (cache_entry->shader_count) { - cache_entry->descriptor_set_layouts = (VkDescriptorSetLayout *)vk_host_malloc(user_context, cache_entry->shader_count * sizeof(VkDescriptorSetLayout), 0, alloc_scope, allocator->callbacks()); - if (cache_entry->descriptor_set_layouts == nullptr) { + if (compiled_module->shader_count) { + compiled_module->descriptor_set_layouts = (VkDescriptorSetLayout *)vk_host_malloc(user_context, compiled_module->shader_count * sizeof(VkDescriptorSetLayout), 0, alloc_scope, allocator->callbacks()); + if (compiled_module->descriptor_set_layouts == nullptr) { error(user_context) << "Vulkan: Failed to allocate descriptor set layouts for cache entry! Out of memory!\n"; return nullptr; } - memset(cache_entry->descriptor_set_layouts, 0, cache_entry->shader_count * sizeof(VkDescriptorSetLayout)); + memset(compiled_module->descriptor_set_layouts, 0, compiled_module->shader_count * sizeof(VkDescriptorSetLayout)); } #ifdef DEBUG_RUNTIME @@ -1443,91 +1590,112 @@ VulkanCompilationCacheEntry *vk_compile_shader_module(void *user_context, Vulkan debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n"; #endif - return cache_entry; + return compiled_module; } -int vk_destroy_shader_modules(void *user_context, VulkanMemoryAllocator *allocator) { - +void vk_destroy_compiled_shader_module(VulkanCompiledShaderModule *shader_module, VulkanMemoryAllocator *allocator) { + void *user_context = nullptr; #ifdef DEBUG_RUNTIME debug(user_context) - << " vk_destroy_shader_modules (user_context: " << user_context << ", " - << "allocator: " << (void *)allocator << ", " - << "device: " << (void *)allocator->current_device() << ")\n"; - - uint64_t t_before = halide_current_time_ns(user_context); + << " vk_destroy_compiled_shader_module (shader_module: " + << shader_module << ", allocator: " << allocator << ")\n"; #endif - if (allocator == nullptr) { - error(user_context) << "Vulkan: Failed to destroy shader modules ... 
invalid allocator pointer!\n"; - return halide_error_code_generic_error; + if (shader_module == nullptr) { + return; } - // Functor to match compilation cache destruction call with scoped params - struct DestroyShaderModule { - void *user_context = nullptr; - VulkanMemoryAllocator *allocator = nullptr; + if (allocator == nullptr) { + return; + } - DestroyShaderModule(void *ctx, VulkanMemoryAllocator *allocator) - : user_context(ctx), allocator(allocator) { + if (shader_module->descriptor_set_layouts) { + for (uint32_t n = 0; n < shader_module->shader_count; n++) { + debug(user_context) << " destroying descriptor set layout [" << n << "] " << shader_module->shader_bindings[n].entry_point_name << "\n"; + vk_destroy_descriptor_set_layout(user_context, allocator, shader_module->descriptor_set_layouts[n]); + shader_module->descriptor_set_layouts[n] = VK_NULL_HANDLE; } + vk_host_free(user_context, shader_module->descriptor_set_layouts, allocator->callbacks()); + shader_module->descriptor_set_layouts = nullptr; + } + if (shader_module->pipeline_layout) { + debug(user_context) << " destroying pipeline layout " << (void *)shader_module->pipeline_layout << "\n"; + vk_destroy_pipeline_layout(user_context, allocator, shader_module->pipeline_layout); + shader_module->pipeline_layout = VK_NULL_HANDLE; + } - void operator()(VulkanCompilationCacheEntry *cache_entry) { - if (cache_entry != nullptr) { - if (cache_entry->descriptor_set_layouts) { - for (uint32_t n = 0; n < cache_entry->shader_count; n++) { - debug(user_context) << " destroying descriptor set layout [" << n << "] " << cache_entry->shader_bindings[n].entry_point_name << "\n"; - vk_destroy_descriptor_set_layout(user_context, allocator, cache_entry->descriptor_set_layouts[n]); - cache_entry->descriptor_set_layouts[n] = {0}; - } - vk_host_free(user_context, cache_entry->descriptor_set_layouts, allocator->callbacks()); - cache_entry->descriptor_set_layouts = nullptr; - } - if (cache_entry->pipeline_layout) { - debug(user_context) << " destroying pipeline layout " << (void *)cache_entry->pipeline_layout << "\n"; - vk_destroy_pipeline_layout(user_context, allocator, cache_entry->pipeline_layout); - cache_entry->pipeline_layout = {0}; - } - if (cache_entry->shader_bindings) { - for (uint32_t n = 0; n < cache_entry->shader_count; n++) { - if (cache_entry->shader_bindings[n].args_region) { - vk_destroy_scalar_uniform_buffer(user_context, allocator, cache_entry->shader_bindings[n].args_region); - cache_entry->shader_bindings[n].args_region = nullptr; - } - if (cache_entry->shader_bindings[n].descriptor_pool) { - vk_destroy_descriptor_pool(user_context, allocator, cache_entry->shader_bindings[n].descriptor_pool); - cache_entry->shader_bindings[n].descriptor_pool = {0}; - } - if (cache_entry->shader_bindings[n].specialization_constants) { - vk_host_free(user_context, cache_entry->shader_bindings[n].specialization_constants, allocator->callbacks()); - cache_entry->shader_bindings[n].specialization_constants = nullptr; - } - if (cache_entry->shader_bindings[n].shared_memory_allocations) { - vk_host_free(user_context, cache_entry->shader_bindings[n].shared_memory_allocations, allocator->callbacks()); - cache_entry->shader_bindings[n].shared_memory_allocations = nullptr; - } - if (cache_entry->shader_bindings[n].compute_pipeline) { - vk_destroy_compute_pipeline(user_context, allocator, cache_entry->shader_bindings[n].compute_pipeline); - cache_entry->shader_bindings[n].compute_pipeline = {0}; - } - } - - vk_host_free(user_context, 
cache_entry->shader_bindings, allocator->callbacks()); - cache_entry->shader_bindings = nullptr; - } - if (cache_entry->shader_module) { - debug(user_context) << " . destroying shader module " << (void *)cache_entry->shader_module << "\n"; - vkDestroyShaderModule(allocator->current_device(), cache_entry->shader_module, allocator->callbacks()); - cache_entry->shader_module = {0}; - } - cache_entry->shader_count = 0; - vk_host_free(user_context, cache_entry, allocator->callbacks()); - cache_entry = nullptr; + if (shader_module->shader_bindings) { + for (uint32_t n = 0; n < shader_module->shader_count; n++) { + if (shader_module->shader_bindings[n].args_region) { + vk_destroy_scalar_uniform_buffer(user_context, allocator, shader_module->shader_bindings[n].args_region); + shader_module->shader_bindings[n].args_region = nullptr; + } + if (shader_module->shader_bindings[n].descriptor_pool) { + vk_destroy_descriptor_pool(user_context, allocator, shader_module->shader_bindings[n].descriptor_pool); + shader_module->shader_bindings[n].descriptor_pool = VK_NULL_HANDLE; + } + if (shader_module->shader_bindings[n].specialization_constants) { + vk_host_free(user_context, shader_module->shader_bindings[n].specialization_constants, allocator->callbacks()); + shader_module->shader_bindings[n].specialization_constants = nullptr; + } + if (shader_module->shader_bindings[n].shared_memory_allocations) { + vk_host_free(user_context, shader_module->shader_bindings[n].shared_memory_allocations, allocator->callbacks()); + shader_module->shader_bindings[n].shared_memory_allocations = nullptr; + } + if (shader_module->shader_bindings[n].compute_pipeline) { + vk_destroy_compute_pipeline(user_context, allocator, shader_module->shader_bindings[n].compute_pipeline); + shader_module->shader_bindings[n].compute_pipeline = VK_NULL_HANDLE; } } - }; + vk_host_free(user_context, shader_module->shader_bindings, allocator->callbacks()); + shader_module->shader_bindings = nullptr; + } + if (shader_module->shader_module) { + debug(user_context) << " . 
destroying shader module " << (void *)shader_module->shader_module << "\n"; + vkDestroyShaderModule(allocator->current_device(), shader_module->shader_module, allocator->callbacks()); + shader_module->shader_module = VK_NULL_HANDLE; + } + shader_module->shader_count = 0; + vk_host_free(user_context, shader_module, allocator->callbacks()); + shader_module = nullptr; +} + +void vk_destroy_compilation_cache_entry(VulkanCompilationCacheEntry *cache_entry) { + void *user_context = nullptr; + debug(user_context) + << " vk_destroy_compilation_cache_entry (cache_entry: " << cache_entry << ")\n"; + + if (cache_entry == nullptr) { + return; + } - DestroyShaderModule module_destructor(user_context, allocator); - compilation_cache.delete_context(user_context, allocator->current_device(), module_destructor); + VulkanMemoryAllocator *allocator = cache_entry->allocator; + if (allocator == nullptr) { + return; + } + + for (uint32_t m = 0; m < cache_entry->module_count; m++) { + VulkanCompiledShaderModule *shader_module = cache_entry->compiled_modules[m]; + vk_destroy_compiled_shader_module(shader_module, allocator); + } + + cache_entry->module_count = 0; + cache_entry->allocator = nullptr; + vk_host_free(user_context, cache_entry, allocator->callbacks()); + cache_entry = nullptr; +} + +int vk_destroy_shader_modules(void *user_context, VulkanMemoryAllocator *allocator) { + + debug(user_context) + << " vk_destroy_shader_modules (user_context: " << user_context << ")\n"; + +#ifdef DEBUG_RUNTIME + uint64_t t_before = halide_current_time_ns(user_context); +#endif + if (allocator != nullptr) { + compilation_cache.delete_context(user_context, allocator->current_device(), vk_destroy_compilation_cache_entry); + } #ifdef DEBUG_RUNTIME uint64_t t_after = halide_current_time_ns(user_context); diff --git a/test/correctness/async_copy_chain.cpp b/test/correctness/async_copy_chain.cpp index ae8623d446bb..238b1eb8821b 100644 --- a/test/correctness/async_copy_chain.cpp +++ b/test/correctness/async_copy_chain.cpp @@ -20,11 +20,15 @@ void make_pipeline(Func &A, Func &B) { } int main(int argc, char **argv) { - if (get_jit_target_from_environment().arch == Target::WebAssembly) { + Target target = get_jit_target_from_environment(); + if (target.arch == Target::WebAssembly) { printf("[SKIP] WebAssembly does not support async() yet.\n"); return 0; } - + if (target.has_feature(Target::Vulkan) && (target.os == Target::Windows)) { + printf("[SKIP] Skipping test for Vulkan on Windows ... fails unless run on its own!\n"); + return 0; + } // Make a list of extern pipeline stages (just copies) all async // and connected by double buffers, then try various nestings of // them. This is a stress test of the async extern storage folding diff --git a/test/correctness/gpu_allocation_cache.cpp b/test/correctness/gpu_allocation_cache.cpp index 51b03e73f82c..1d58c526007f 100644 --- a/test/correctness/gpu_allocation_cache.cpp +++ b/test/correctness/gpu_allocation_cache.cpp @@ -28,6 +28,10 @@ int main(int argc, char **argv) { printf("[SKIP] Skipping test for Vulkan on iOS/OSX (MoltenVK only allows 30 buffers to be allocated)!\n"); return 0; } + if (target.has_feature(Target::Vulkan) && (target.os == Target::Windows)) { + printf("[SKIP] Skipping test for Vulkan on Windows ... 
fails unless run on its own!\n"); + return 0; + } if (target.has_feature(Target::WebGPU)) { printf("[SKIP] Allocation cache not yet implemented for WebGPU.\n"); return 0; @@ -152,26 +156,20 @@ int main(int argc, char **argv) { f.get(); } - // Vulkan will OOM unless allocation cache is used ... skip this since we just ran the same tests above concurrently - if (!target.has_feature(Target::Vulkan)) { - - // Now benchmark with and without, (just informational, as this isn't a performance test) - double t1 = Tools::benchmark([&]() { - test1(true, false); - test2(true, false); - test3(true, false); - }); - - double t2 = Tools::benchmark([&]() { - test1(false, false); - test2(false, false); - test3(false, false); - }); - - printf("Runtime with cache: %f\n" - "Without cache: %f\n", - t1, t2); - } + // Now benchmark with and without, (just informational, as this isn't a performance test) + double t1 = Tools::benchmark([&]() { + test1(true, false); + test2(true, false); + test3(true, false); + }); + double t2 = Tools::benchmark([&]() { + test1(false, false); + test2(false, false); + test3(false, false); + }); + printf("Runtime with cache: %f\n" + "Without cache: %f\n", + t1, t2); printf("Success!\n"); return 0; diff --git a/test/correctness/gpu_object_lifetime_1.cpp b/test/correctness/gpu_object_lifetime_1.cpp index ce11522aed6f..ee597afd06d8 100644 --- a/test/correctness/gpu_object_lifetime_1.cpp +++ b/test/correctness/gpu_object_lifetime_1.cpp @@ -19,13 +19,6 @@ int main(int argc, char *argv[]) { Target target = get_jit_target_from_environment(); - // Disable the Vulkan validation layer or we'll leak - // https://github.com/halide/Halide/issues/8290 - if (target.has_feature(Target::Vulkan)) { - char clear_env_var[] = "VK_INSTANCE_LAYERS="; - putenv(clear_env_var); - } - // We need to hook the default handler too, to catch the frees done by release_all JITHandlers handlers; handlers.custom_print = halide_print; diff --git a/test/correctness/gpu_object_lifetime_2.cpp b/test/correctness/gpu_object_lifetime_2.cpp index ade23b1d15e8..17010576fb99 100644 --- a/test/correctness/gpu_object_lifetime_2.cpp +++ b/test/correctness/gpu_object_lifetime_2.cpp @@ -18,13 +18,6 @@ int main(int argc, char *argv[]) { Target target = get_jit_target_from_environment(); - // Disable the Vulkan validation layer or we'll leak - // https://github.com/halide/Halide/issues/8290 - if (target.has_feature(Target::Vulkan)) { - char clear_env_var[] = "VK_INSTANCE_LAYERS="; - putenv(clear_env_var); - } - // We need to hook the default handler too, to catch the frees done by release_all JITHandlers handlers; handlers.custom_print = halide_print; diff --git a/test/correctness/gpu_object_lifetime_3.cpp b/test/correctness/gpu_object_lifetime_3.cpp index aea0b6280292..edaee04ba9ff 100644 --- a/test/correctness/gpu_object_lifetime_3.cpp +++ b/test/correctness/gpu_object_lifetime_3.cpp @@ -18,13 +18,6 @@ int main(int argc, char *argv[]) { Target target = get_jit_target_from_environment(); - // Disable the Vulkan validation layer or we'll leak - // https://github.com/halide/Halide/issues/8290 - if (target.has_feature(Target::Vulkan)) { - char clear_env_var[] = "VK_INSTANCE_LAYERS="; - putenv(clear_env_var); - } - // We need to hook the default handler too, to catch the frees done by release_all JITHandlers handlers; handlers.custom_print = halide_print; diff --git a/test/correctness/leak_device_memory.cpp b/test/correctness/leak_device_memory.cpp index fcded0edac54..567aeddb5fd8 100644 --- a/test/correctness/leak_device_memory.cpp +++ 
b/test/correctness/leak_device_memory.cpp @@ -21,13 +21,6 @@ int main(int argc, char **argv) { Target target = get_jit_target_from_environment(); - // Disable the Vulkan validation layer or we'll leak - // https://github.com/halide/Halide/issues/8290 - if (target.has_feature(Target::Vulkan)) { - char clear_env_var[] = "VK_INSTANCE_LAYERS="; - putenv(clear_env_var); - } - // We need debug output to record object creation. target.set_feature(Target::Debug); diff --git a/test/performance/wrap.cpp b/test/performance/wrap.cpp index 9c3c0af66dbf..9a572a38c10b 100644 --- a/test/performance/wrap.cpp +++ b/test/performance/wrap.cpp @@ -115,11 +115,6 @@ int main(int argc, char **argv) { printf("[SKIP] Performance tests are meaningless and/or misleading under WebAssembly interpreter.\n"); return 0; } - if (target.has_feature(Target::Vulkan)) { - // FIXME: See issue #7559 : performance_wrap test results don't match - printf("[SKIP] Skipping test for Vulkan. Wrap results don't match ... \n"); - return 0; - } if (!target.has_gpu_feature()) { printf("[SKIP] No GPU target enabled.\n"); return 0; diff --git a/vcpkg.json b/vcpkg.json index c8e2888cc678..49314b519f30 100644 --- a/vcpkg.json +++ b/vcpkg.json @@ -1,11 +1,11 @@ { "name": "halide", - "version": "19.0.0", + "version": "20.0.0", "description": "Halide is a programming language designed to make it easier to write high-performance image and array processing code on modern machines.", "homepage": "https://github.com/halide/Halide", "license": "MIT", "supports": "!uwp", - "builtin-baseline": "4fa096bce24561bf869e56384e655f4b2bab71a8", + "builtin-baseline": "d567b667adba0e72c5c3931ddbe745b66aa34b73", "default-features": [ "jit", "serialization" @@ -24,7 +24,8 @@ "tools" ] }, - "spirv-headers" + "spirv-headers", + "vulkan-headers" ], "features": { "developer": {